From c334b5ed045e0f395e70ed94a46c5c5e7176b53b Mon Sep 17 00:00:00 2001 From: nijkah Date: Tue, 17 May 2022 15:19:00 +0900 Subject: [PATCH 1/4] Add mmdetbase --- .../_base_/datasets/cityscapes_detection.py | 56 + .../_base_/datasets/cityscapes_instance.py | 56 + .../mmdet/_base_/datasets/coco_detection.py | 49 + .../mmdet/_base_/datasets/coco_instance.py | 49 + .../_base_/datasets/coco_instance_semantic.py | 54 + .../mmdet/_base_/datasets/coco_panoptic.py | 59 ++ configs/mmdet/_base_/datasets/deepfashion.py | 53 + .../_base_/datasets/lvis_v0.5_instance.py | 24 + .../mmdet/_base_/datasets/lvis_v1_instance.py | 24 + .../_base_/datasets/openimages_detection.py | 65 ++ configs/mmdet/_base_/datasets/voc0712.py | 55 + configs/mmdet/_base_/datasets/wider_face.py | 63 ++ configs/mmdet/_base_/default_runtime.py | 27 + .../models/cascade_mask_rcnn_r50_fpn.py | 196 ++++ .../_base_/models/cascade_rcnn_r50_fpn.py | 179 ++++ .../mmdet/_base_/models/fast_rcnn_r50_fpn.py | 62 ++ .../_base_/models/faster_rcnn_r50_caffe_c4.py | 117 +++ .../models/faster_rcnn_r50_caffe_dc5.py | 105 ++ .../_base_/models/faster_rcnn_r50_fpn.py | 108 ++ .../_base_/models/mask_rcnn_r50_caffe_c4.py | 125 +++ .../mmdet/_base_/models/mask_rcnn_r50_fpn.py | 120 +++ .../mmdet/_base_/models/retinanet_r50_fpn.py | 60 ++ .../mmdet/_base_/models/rpn_r50_caffe_c4.py | 58 ++ configs/mmdet/_base_/models/rpn_r50_fpn.py | 58 ++ configs/mmdet/_base_/models/ssd300.py | 56 + configs/mmdet/_base_/schedules/schedule_1x.py | 11 + .../mmdet/_base_/schedules/schedule_20e.py | 11 + configs/mmdet/_base_/schedules/schedule_2x.py | 11 + configs/mmdet/albu_example/README.md | 31 + .../mask_rcnn_r50_fpn_albu_1x_coco.py | 73 ++ configs/mmdet/atss/README.md | 31 + configs/mmdet/atss/atss_r101_fpn_1x_coco.py | 6 + configs/mmdet/atss/atss_r50_fpn_1x_coco.py | 62 ++ configs/mmdet/atss/metafile.yml | 60 ++ configs/mmdet/autoassign/README.md | 35 + .../autoassign_r50_fpn_8x2_1x_coco.py | 85 ++ configs/mmdet/autoassign/metafile.yml | 33 + 
configs/mmdet/carafe/README.md | 42 + .../faster_rcnn_r50_fpn_carafe_1x_coco.py | 50 + .../mask_rcnn_r50_fpn_carafe_1x_coco.py | 60 ++ configs/mmdet/carafe/metafile.yml | 55 + configs/mmdet/cascade_rcnn/README.md | 79 ++ ...ascade_mask_rcnn_r101_caffe_fpn_1x_coco.py | 7 + ...ask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py | 7 + .../cascade_mask_rcnn_r101_fpn_1x_coco.py | 6 + .../cascade_mask_rcnn_r101_fpn_20e_coco.py | 6 + ...cade_mask_rcnn_r101_fpn_mstrain_3x_coco.py | 6 + ...cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py | 41 + ...mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py | 49 + .../cascade_mask_rcnn_r50_fpn_1x_coco.py | 5 + .../cascade_mask_rcnn_r50_fpn_20e_coco.py | 5 + ...scade_mask_rcnn_r50_fpn_mstrain_3x_coco.py | 4 + ...ascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py | 14 + ...scade_mask_rcnn_x101_32x4d_fpn_20e_coco.py | 14 + ...ask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py | 14 + ...ask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py | 60 ++ ...ascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py | 14 + ...scade_mask_rcnn_x101_64x4d_fpn_20e_coco.py | 14 + ...ask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py | 14 + .../cascade_rcnn_r101_caffe_fpn_1x_coco.py | 7 + .../cascade_rcnn_r101_fpn_1x_coco.py | 6 + .../cascade_rcnn_r101_fpn_20e_coco.py | 6 + .../cascade_rcnn_r50_caffe_fpn_1x_coco.py | 42 + .../cascade_rcnn_r50_fpn_1x_coco.py | 5 + .../cascade_rcnn_r50_fpn_20e_coco.py | 4 + .../cascade_rcnn_x101_32x4d_fpn_1x_coco.py | 14 + .../cascade_rcnn_x101_32x4d_fpn_20e_coco.py | 14 + .../cascade_rcnn_x101_64x4d_fpn_1x_coco.py | 15 + .../cascade_rcnn_x101_64x4d_fpn_20e_coco.py | 15 + configs/mmdet/cascade_rcnn/metafile.yml | 525 ++++++++++ configs/mmdet/cascade_rpn/README.md | 41 + .../crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py | 77 ++ .../crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py | 92 ++ .../cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py | 77 ++ configs/mmdet/cascade_rpn/metafile.yml | 44 + configs/mmdet/centernet/README.md | 40 + .../centernet/centernet_resnet18_140e_coco.py | 3 + 
.../centernet_resnet18_dcnv2_140e_coco.py | 127 +++ configs/mmdet/centernet/metafile.yml | 46 + configs/mmdet/centripetalnet/README.md | 36 + ...lnet_hourglass104_mstest_16x6_210e_coco.py | 110 ++ configs/mmdet/centripetalnet/metafile.yml | 39 + configs/mmdet/cityscapes/README.md | 46 + .../faster_rcnn_r50_fpn_1x_cityscapes.py | 44 + .../mask_rcnn_r50_fpn_1x_cityscapes.py | 51 + .../mmdet/common/lsj_100e_coco_instance.py | 90 ++ .../common/mstrain-poly_3x_coco_instance.py | 80 ++ configs/mmdet/common/mstrain_3x_coco.py | 76 ++ .../mmdet/common/mstrain_3x_coco_instance.py | 76 ++ .../mmdet/common/ssj_270k_coco_instance.py | 91 ++ .../common/ssj_scp_270k_coco_instance.py | 97 ++ configs/mmdet/cornernet/README.md | 43 + ...rnet_hourglass104_mstest_10x5_210e_coco.py | 110 ++ ...rnet_hourglass104_mstest_32x3_210e_coco.py | 110 ++ ...ernet_hourglass104_mstest_8x6_210e_coco.py | 110 ++ configs/mmdet/cornernet/metafile.yml | 83 ++ configs/mmdet/dcn/README.md | 48 + ..._mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py | 5 + ...e_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py | 5 + ...rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py | 5 + ...scade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py | 5 + ...ascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py | 5 + ...aster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py | 5 + ...faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py | 5 + .../dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py | 12 + ...rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py | 16 + .../mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py | 5 + .../mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py | 5 + ...k_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py | 7 + configs/mmdet/dcn/metafile.yml | 272 +++++ configs/mmdet/dcnv2/README.md | 37 + ...aster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py | 5 + ...cnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py | 5 + .../faster_rcnn_r50_fpn_mdpool_1x_coco.py | 12 + ..._rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py | 7 + .../mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py | 5 + configs/mmdet/dcnv2/metafile.yml | 123 +++ 
configs/mmdet/deepfashion/README.md | 70 ++ .../mask_rcnn_r50_fpn_15e_deepfashion.py | 10 + configs/mmdet/deformable_detr/README.md | 41 + .../deformable_detr_r50_16x2_50e_coco.py | 177 ++++ ...eformable_detr_refine_r50_16x2_50e_coco.py | 2 + ..._detr_twostage_refine_r50_16x2_50e_coco.py | 2 + configs/mmdet/deformable_detr/metafile.yml | 56 + configs/mmdet/detectors/README.md | 69 ++ .../detectors/cascade_rcnn_r50_rfp_1x_coco.py | 28 + .../detectors/cascade_rcnn_r50_sac_1x_coco.py | 12 + .../detectors_cascade_rcnn_r50_1x_coco.py | 32 + .../detectors/detectors_htc_r101_20e_coco.py | 28 + .../detectors/detectors_htc_r50_1x_coco.py | 28 + .../mmdet/detectors/htc_r50_rfp_1x_coco.py | 24 + .../mmdet/detectors/htc_r50_sac_1x_coco.py | 8 + configs/mmdet/detectors/metafile.yml | 114 ++ configs/mmdet/detr/README.md | 37 + configs/mmdet/detr/detr_r50_8x2_150e_coco.py | 150 +++ configs/mmdet/detr/metafile.yml | 33 + configs/mmdet/double_heads/README.md | 32 + .../dh_faster_rcnn_r50_fpn_1x_coco.py | 23 + configs/mmdet/double_heads/metafile.yml | 41 + configs/mmdet/dyhead/README.md | 46 + .../atss_r50_caffe_fpn_dyhead_1x_coco.py | 112 ++ .../dyhead/atss_r50_fpn_dyhead_1x_coco.py | 65 ++ configs/mmdet/dyhead/metafile.yml | 63 ++ configs/mmdet/dynamic_rcnn/README.md | 30 + .../dynamic_rcnn_r50_fpn_1x_coco.py | 28 + configs/mmdet/dynamic_rcnn/metafile.yml | 35 + configs/mmdet/efficientnet/README.md | 30 + configs/mmdet/efficientnet/metafile.yml | 19 + ...retinanet_effb3_fpn_crop896_8x4_1x_coco.py | 94 ++ configs/mmdet/empirical_attention/README.md | 33 + ...ter_rcnn_r50_fpn_attention_0010_1x_coco.py | 13 + ...rcnn_r50_fpn_attention_0010_dcn_1x_coco.py | 16 + ...ter_rcnn_r50_fpn_attention_1111_1x_coco.py | 13 + ...rcnn_r50_fpn_attention_1111_dcn_1x_coco.py | 16 + .../mmdet/empirical_attention/metafile.yml | 103 ++ configs/mmdet/fast_rcnn/README.md | 72 ++ .../fast_rcnn_r101_caffe_fpn_1x_coco.py | 7 + .../fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py | 6 + 
.../fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py | 6 + .../fast_rcnn_r50_caffe_fpn_1x_coco.py | 48 + .../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py | 52 + .../fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py | 5 + configs/mmdet/faster_rcnn/README.md | 88 ++ .../faster_rcnn_r101_caffe_fpn_1x_coco.py | 7 + ...ter_rcnn_r101_caffe_fpn_mstrain_3x_coco.py | 49 + .../faster_rcnn_r101_fpn_1x_coco.py | 6 + .../faster_rcnn_r101_fpn_2x_coco.py | 6 + .../faster_rcnn_r101_fpn_mstrain_3x_coco.py | 7 + .../faster_rcnn_r50_caffe_c4_1x_coco.py | 39 + ...aster_rcnn_r50_caffe_c4_mstrain_1x_coco.py | 38 + .../faster_rcnn_r50_caffe_dc5_1x_coco.py | 37 + ...ster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py | 42 + ...ster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py | 4 + .../faster_rcnn_r50_caffe_fpn_1x_coco.py | 41 + .../faster_rcnn_r50_caffe_fpn_90k_coco.py | 15 + ..._fpn_mstrain_1x_coco-person-bicycle-car.py | 9 + ...nn_r50_caffe_fpn_mstrain_1x_coco-person.py | 9 + ...ster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py | 46 + ...ster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py | 4 + ...ster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py | 47 + ...ter_rcnn_r50_caffe_fpn_mstrain_90k_coco.py | 15 + .../faster_rcnn_r50_fpn_1x_coco.py | 5 + .../faster_rcnn_r50_fpn_2x_coco.py | 5 + ...faster_rcnn_r50_fpn_bounded_iou_1x_coco.py | 6 + .../faster_rcnn_r50_fpn_ciou_1x_coco.py | 6 + .../faster_rcnn_r50_fpn_fp16_1x_coco.py | 3 + .../faster_rcnn_r50_fpn_giou_1x_coco.py | 6 + .../faster_rcnn_r50_fpn_iou_1x_coco.py | 6 + .../faster_rcnn_r50_fpn_mstrain_3x_coco.py | 3 + .../faster_rcnn_r50_fpn_ohem_1x_coco.py | 2 + .../faster_rcnn_r50_fpn_soft_nms_1x_coco.py | 12 + ...aster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py | 17 + .../faster_rcnn_x101_32x4d_fpn_1x_coco.py | 14 + .../faster_rcnn_x101_32x4d_fpn_2x_coco.py | 14 + ...ter_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py | 16 + ...ter_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py | 62 ++ .../faster_rcnn_x101_64x4d_fpn_1x_coco.py | 14 + .../faster_rcnn_x101_64x4d_fpn_2x_coco.py | 14 + 
...ter_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py | 16 + configs/mmdet/faster_rcnn/metafile.yml | 451 ++++++++ configs/mmdet/fcos/README.md | 45 + ...nreg-giou_r50_caffe_fpn_gn-head_1x_coco.py | 54 + ...-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py | 56 + ...os_center_r50_caffe_fpn_gn-head_1x_coco.py | 2 + .../fcos_r101_caffe_fpn_gn-head_1x_coco.py | 7 + ...ffe_fpn_gn-head_mstrain_640-800_2x_coco.py | 47 + .../fcos_r50_caffe_fpn_gn-head_1x_coco.py | 106 ++ .../fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py | 4 + ...ffe_fpn_gn-head_mstrain_640-800_2x_coco.py | 39 + ...x4d_fpn_gn-head_mstrain_640-800_2x_coco.py | 60 ++ configs/mmdet/fcos/metafile.yml | 146 +++ configs/mmdet/foveabox/README.md | 53 + ...ovea_align_r101_fpn_gn-head_4x4_2x_coco.py | 12 + ...fpn_gn-head_mstrain_640-800_4x4_2x_coco.py | 29 + ...fovea_align_r50_fpn_gn-head_4x4_2x_coco.py | 10 + ...fpn_gn-head_mstrain_640-800_4x4_2x_coco.py | 25 + .../foveabox/fovea_r101_fpn_4x4_1x_coco.py | 6 + .../foveabox/fovea_r101_fpn_4x4_2x_coco.py | 6 + .../foveabox/fovea_r50_fpn_4x4_1x_coco.py | 52 + .../foveabox/fovea_r50_fpn_4x4_2x_coco.py | 4 + configs/mmdet/foveabox/metafile.yml | 172 ++++ configs/mmdet/fpg/README.md | 43 + ...er_rcnn_r50_fpg-chn128_crop640_50e_coco.py | 9 + .../faster_rcnn_r50_fpg_crop640_50e_coco.py | 48 + .../faster_rcnn_r50_fpn_crop640_50e_coco.py | 73 ++ ...sk_rcnn_r50_fpg-chn128_crop640_50e_coco.py | 10 + .../fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py | 48 + .../fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py | 79 ++ configs/mmdet/fpg/metafile.yml | 104 ++ ...tinanet_r50_fpg-chn128_crop640_50e_coco.py | 5 + .../fpg/retinanet_r50_fpg_crop640_50e_coco.py | 53 + configs/mmdet/free_anchor/README.md | 37 + configs/mmdet/free_anchor/metafile.yml | 79 ++ .../retinanet_free_anchor_r101_fpn_1x_coco.py | 6 + .../retinanet_free_anchor_r50_fpn_1x_coco.py | 22 + ...anet_free_anchor_x101_32x4d_fpn_1x_coco.py | 13 + configs/mmdet/fsaf/README.md | 57 + configs/mmdet/fsaf/fsaf_r101_fpn_1x_coco.py | 6 + 
configs/mmdet/fsaf/fsaf_r50_fpn_1x_coco.py | 48 + .../mmdet/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py | 14 + configs/mmdet/fsaf/metafile.yml | 80 ++ configs/mmdet/gcnet/README.md | 69 ++ ..._x101_32x4d_fpn_syncbn-backbone_1x_coco.py | 4 + ...fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py | 4 + ...kbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py | 11 + ...ckbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py | 11 + ...n_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py | 11 + ...pn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py | 11 + ...ask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py | 8 + ...mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py | 8 + ...k_rcnn_r101_fpn_syncbn-backbone_1x_coco.py | 4 + ...n_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py | 11 + ...pn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py | 11 + ...mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py | 8 + .../mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py | 8 + ...sk_rcnn_r50_fpn_syncbn-backbone_1x_coco.py | 4 + ...n_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py | 11 + ...pn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py | 11 + ..._x101_32x4d_fpn_syncbn-backbone_1x_coco.py | 4 + ...n_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py | 11 + ...pn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py | 11 + configs/mmdet/gcnet/metafile.yml | 440 ++++++++ configs/mmdet/gfl/README.md | 42 + ...fl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py | 15 + .../mmdet/gfl/gfl_r101_fpn_mstrain_2x_coco.py | 13 + configs/mmdet/gfl/gfl_r50_fpn_1x_coco.py | 57 + .../mmdet/gfl/gfl_r50_fpn_mstrain_2x_coco.py | 22 + ...1_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py | 18 + .../gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py | 16 + configs/mmdet/gfl/metafile.yml | 134 +++ configs/mmdet/ghm/README.md | 33 + configs/mmdet/ghm/metafile.yml | 101 ++ .../ghm/retinanet_ghm_r101_fpn_1x_coco.py | 6 + .../ghm/retinanet_ghm_r50_fpn_1x_coco.py | 19 + .../retinanet_ghm_x101_32x4d_fpn_1x_coco.py | 14 + .../retinanet_ghm_x101_64x4d_fpn_1x_coco.py | 14 + configs/mmdet/gn+ws/README.md | 54 + .../faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py | 6 + 
.../faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py | 16 + ...r_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py | 18 + ...er_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py | 18 + ..._rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py | 4 + .../mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py | 6 + ...k_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py | 4 + .../mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py | 20 + ...x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py | 4 + ...k_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py | 19 + ..._x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py | 4 + ...sk_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py | 19 + configs/mmdet/gn+ws/metafile.yml | 263 +++++ configs/mmdet/gn/README.md | 41 + .../gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py | 7 + .../gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py | 5 + .../gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py | 49 + .../gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py | 5 + ...ask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py | 17 + ...ask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py | 5 + configs/mmdet/gn/metafile.yml | 162 +++ configs/mmdet/grid_rcnn/README.md | 47 + .../grid_rcnn_r101_fpn_gn-head_2x_coco.py | 7 + .../grid_rcnn_r50_fpn_gn-head_1x_coco.py | 11 + .../grid_rcnn_r50_fpn_gn-head_2x_coco.py | 131 +++ ...rid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py | 24 + ...rid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py | 13 + configs/mmdet/grid_rcnn/metafile.yml | 101 ++ configs/mmdet/groie/README.md | 72 ++ .../faster_rcnn_r50_fpn_groie_1x_coco.py | 25 + ...grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py | 45 + ...cbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py | 45 + .../groie/mask_rcnn_r50_fpn_groie_1x_coco.py | 45 + ...cbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py | 45 + configs/mmdet/groie/metafile.yml | 93 ++ configs/mmdet/guided_anchoring/README.md | 59 ++ .../ga_fast_r50_caffe_fpn_1x_coco.py | 65 ++ .../ga_faster_r101_caffe_fpn_1x_coco.py | 7 + .../ga_faster_r50_caffe_fpn_1x_coco.py | 65 ++ .../ga_faster_r50_fpn_1x_coco.py | 65 ++ .../ga_faster_x101_32x4d_fpn_1x_coco.py | 14 + .../ga_faster_x101_64x4d_fpn_1x_coco.py | 14 + 
.../ga_retinanet_r101_caffe_fpn_1x_coco.py | 7 + .../ga_retinanet_r101_caffe_fpn_mstrain_2x.py | 169 +++ .../ga_retinanet_r50_caffe_fpn_1x_coco.py | 62 ++ .../ga_retinanet_r50_fpn_1x_coco.py | 62 ++ .../ga_retinanet_x101_32x4d_fpn_1x_coco.py | 14 + .../ga_retinanet_x101_64x4d_fpn_1x_coco.py | 14 + .../ga_rpn_r101_caffe_fpn_1x_coco.py | 8 + .../ga_rpn_r50_caffe_fpn_1x_coco.py | 58 ++ .../ga_rpn_r50_fpn_1x_coco.py | 58 ++ .../ga_rpn_x101_32x4d_fpn_1x_coco.py | 14 + .../ga_rpn_x101_64x4d_fpn_1x_coco.py | 14 + configs/mmdet/guided_anchoring/metafile.yml | 246 +++++ configs/mmdet/hrnet/README.md | 101 ++ ...cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py | 11 + ...cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py | 40 + ...cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py | 12 + .../cascade_rcnn_hrnetv2p_w18_20e_coco.py | 11 + .../cascade_rcnn_hrnetv2p_w32_20e_coco.py | 40 + .../cascade_rcnn_hrnetv2p_w40_20e_coco.py | 12 + .../hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py | 11 + .../hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py | 5 + .../hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py | 37 + .../hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py | 4 + .../hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py | 11 + .../hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py | 4 + .../fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py | 10 + .../fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py | 4 + ...w18_gn-head_mstrain_640-800_4x4_2x_coco.py | 10 + .../fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py | 70 ++ .../fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py | 4 + ...w32_gn-head_mstrain_640-800_4x4_2x_coco.py | 39 + ...w40_gn-head_mstrain_640-800_4x4_2x_coco.py | 11 + .../mmdet/hrnet/htc_hrnetv2p_w18_20e_coco.py | 10 + .../mmdet/hrnet/htc_hrnetv2p_w32_20e_coco.py | 37 + .../mmdet/hrnet/htc_hrnetv2p_w40_20e_coco.py | 11 + .../mmdet/hrnet/htc_hrnetv2p_w40_28e_coco.py | 4 + .../hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py | 4 + .../hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py | 10 + .../hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py | 4 + .../hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py | 
37 + .../hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py | 4 + .../hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py | 11 + .../hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py | 4 + configs/mmdet/hrnet/metafile.yml | 971 ++++++++++++++++++ configs/mmdet/htc/README.md | 67 ++ configs/mmdet/htc/htc_r101_fpn_20e_coco.py | 9 + configs/mmdet/htc/htc_r50_fpn_1x_coco.py | 56 + configs/mmdet/htc/htc_r50_fpn_20e_coco.py | 4 + .../htc_without_semantic_r50_fpn_1x_coco.py | 236 +++++ .../htc/htc_x101_32x4d_fpn_16x1_20e_coco.py | 19 + .../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py | 19 + ...nv_c3-c5_mstrain_400_1400_16x1_20e_coco.py | 43 + configs/mmdet/htc/metafile.yml | 165 +++ configs/mmdet/instaboost/README.md | 58 ++ ...e_mask_rcnn_r101_fpn_instaboost_4x_coco.py | 7 + ...de_mask_rcnn_r50_fpn_instaboost_4x_coco.py | 28 + ..._rcnn_x101_64x4d_fpn_instaboost_4x_coco.py | 14 + .../mask_rcnn_r101_fpn_instaboost_4x_coco.py | 6 + .../mask_rcnn_r50_fpn_instaboost_4x_coco.py | 28 + ..._rcnn_x101_64x4d_fpn_instaboost_4x_coco.py | 14 + configs/mmdet/instaboost/metafile.yml | 99 ++ configs/mmdet/lad/README.md | 44 + .../mmdet/lad/lad_r101_paa_r50_fpn_coco_1x.py | 126 +++ .../mmdet/lad/lad_r50_paa_r101_fpn_coco_1x.py | 125 +++ configs/mmdet/lad/metafile.yml | 42 + configs/mmdet/ld/README.md | 43 + .../ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py | 44 + .../mmdet/ld/ld_r18_gflv1_r101_fpn_coco_1x.py | 62 ++ .../mmdet/ld/ld_r34_gflv1_r101_fpn_coco_1x.py | 19 + .../mmdet/ld/ld_r50_gflv1_r101_fpn_coco_1x.py | 19 + configs/mmdet/ld/metafile.yml | 72 ++ configs/mmdet/legacy_1.x/README.md | 53 + .../cascade_mask_rcnn_r50_fpn_1x_coco_v1.py | 79 ++ .../faster_rcnn_r50_fpn_1x_coco_v1.py | 38 + .../mask_rcnn_r50_fpn_1x_coco_v1.py | 34 + .../retinanet_r50_caffe_fpn_1x_coco_v1.py | 41 + .../retinanet_r50_fpn_1x_coco_v1.py | 17 + configs/mmdet/legacy_1.x/ssd300_coco_v1.py | 84 ++ configs/mmdet/libra_rcnn/README.md | 53 + .../libra_fast_rcnn_r50_fpn_1x_coco.py | 50 + .../libra_faster_rcnn_r101_fpn_1x_coco.py | 6 + 
.../libra_faster_rcnn_r50_fpn_1x_coco.py | 41 + ...ibra_faster_rcnn_x101_64x4d_fpn_1x_coco.py | 14 + .../libra_retinanet_r50_fpn_1x_coco.py | 26 + configs/mmdet/libra_rcnn/metafile.yml | 99 ++ configs/mmdet/lvis/README.md | 54 + ..._r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py | 6 + ...101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py | 6 + ...n_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py | 31 + ...r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py | 31 + ...32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py | 14 + ...x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py | 14 + ...64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py | 14 + ...x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py | 14 + configs/mmdet/mask2former/README.md | 60 ++ .../mask2former_r101_lsj_8x2_50e_coco.py | 7 + .../mask2former_r50_lsj_8x2_50e_coco.py | 253 +++++ ...win-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py | 5 + ...rmer_swin-b-p4-w12-384_lsj_8x2_50e_coco.py | 42 + ...n-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py | 26 + ...ormer_swin-s-p4-w7-224_lsj_8x2_50e_coco.py | 37 + ...ormer_swin-t-p4-w7-224_lsj_8x2_50e_coco.py | 62 ++ configs/mmdet/mask2former/metafile.yml | 159 +++ configs/mmdet/mask_rcnn/README.md | 59 ++ .../mask_rcnn_r101_caffe_fpn_1x_coco.py | 7 + ...cnn_r101_caffe_fpn_mstrain-poly_3x_coco.py | 55 + .../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py | 6 + .../mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py | 6 + ...mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py | 10 + .../mask_rcnn_r50_caffe_c4_1x_coco.py | 39 + .../mask_rcnn_r50_caffe_fpn_1x_coco.py | 40 + ...rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py | 49 + ...rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py | 4 + ...rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py | 4 + ...mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py | 45 + ...mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py | 61 ++ .../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py | 5 + .../mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py | 5 + .../mask_rcnn_r50_fpn_fp16_1x_coco.py | 3 + .../mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py | 4 + .../mask_rcnn_r50_fpn_poly_1x_coco.py | 23 + 
.../mask_rcnn_x101_32x4d_fpn_1x_coco.py | 14 + .../mask_rcnn_x101_32x4d_fpn_2x_coco.py | 14 + ...cnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py | 18 + .../mask_rcnn_x101_32x8d_fpn_1x_coco.py | 65 ++ ...cnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py | 60 ++ ...cnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py | 85 ++ .../mask_rcnn_x101_64x4d_fpn_1x_coco.py | 14 + .../mask_rcnn_x101_64x4d_fpn_2x_coco.py | 14 + ...cnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py | 18 + configs/mmdet/mask_rcnn/metafile.yml | 447 ++++++++ configs/mmdet/maskformer/README.md | 52 + .../maskformer_r50_mstrain_16x1_75e_coco.py | 238 +++++ ...er_swin-l-p4-w12_mstrain_64x1_300e_coco.py | 67 ++ configs/mmdet/maskformer/metafile.yml | 43 + configs/mmdet/ms_rcnn/README.md | 36 + configs/mmdet/ms_rcnn/metafile.yml | 159 +++ .../ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py | 7 + .../ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py | 4 + .../ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py | 16 + .../ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py | 4 + .../mmdet/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py | 16 + .../ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py | 14 + .../ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py | 14 + .../ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py | 4 + configs/mmdet/nas_fcos/README.md | 35 + configs/mmdet/nas_fcos/metafile.yml | 44 + ...shead_r50_caffe_fpn_gn-head_4x4_1x_coco.py | 100 ++ ...shead_r50_caffe_fpn_gn-head_4x4_1x_coco.py | 99 ++ configs/mmdet/nas_fpn/README.md | 36 + configs/mmdet/nas_fpn/metafile.yml | 59 ++ .../retinanet_r50_fpn_crop640_50e_coco.py | 85 ++ .../retinanet_r50_nasfpn_crop640_50e_coco.py | 84 ++ configs/mmdet/openimages/README.md | 143 +++ .../faster_rcnn_r50_fpn_32x2_1x_openimages.py | 23 + ...nn_r50_fpn_32x2_1x_openimages_challenge.py | 47 + ...ter_rcnn_r50_fpn_32x2_cas_1x_openimages.py | 5 + ...50_fpn_32x2_cas_1x_openimages_challenge.py | 5 + configs/mmdet/openimages/metafile.yml | 102 ++ .../retinanet_r50_fpn_32x2_1x_openimages.py | 22 + .../openimages/ssd300_32x8_36e_openimages.py | 83 ++ 
configs/mmdet/paa/README.md | 47 + configs/mmdet/paa/metafile.yml | 104 ++ configs/mmdet/paa/paa_r101_fpn_1x_coco.py | 6 + configs/mmdet/paa/paa_r101_fpn_2x_coco.py | 3 + .../mmdet/paa/paa_r101_fpn_mstrain_3x_coco.py | 6 + configs/mmdet/paa/paa_r50_fpn_1.5x_coco.py | 3 + configs/mmdet/paa/paa_r50_fpn_1x_coco.py | 70 ++ configs/mmdet/paa/paa_r50_fpn_2x_coco.py | 3 + .../mmdet/paa/paa_r50_fpn_mstrain_3x_coco.py | 20 + configs/mmdet/pafpn/README.md | 34 + .../pafpn/faster_rcnn_r50_pafpn_1x_coco.py | 8 + configs/mmdet/pafpn/metafile.yml | 38 + configs/mmdet/panoptic_fpn/README.md | 62 ++ configs/mmdet/panoptic_fpn/metafile.yml | 70 ++ .../panoptic_fpn_r101_fpn_1x_coco.py | 6 + .../panoptic_fpn_r101_fpn_mstrain_3x_coco.py | 6 + .../panoptic_fpn_r50_fpn_1x_coco.py | 33 + .../panoptic_fpn_r50_fpn_mstrain_3x_coco.py | 61 ++ configs/mmdet/pascal_voc/README.md | 40 + ...r_rcnn_r50_caffe_c4_mstrain_18k_voc0712.py | 81 ++ .../faster_rcnn_r50_fpn_1x_voc0712.py | 14 + .../faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py | 75 ++ .../retinanet_r50_fpn_1x_voc0712.py | 14 + configs/mmdet/pascal_voc/ssd300_voc0712.py | 74 ++ configs/mmdet/pascal_voc/ssd512_voc0712.py | 57 + configs/mmdet/pisa/README.md | 50 + configs/mmdet/pisa/metafile.yml | 110 ++ .../pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py | 30 + ...pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py | 30 + .../pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py | 30 + .../pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py | 30 + .../pisa/pisa_retinanet_r50_fpn_1x_coco.py | 7 + .../pisa_retinanet_x101_32x4d_fpn_1x_coco.py | 7 + configs/mmdet/pisa/pisa_ssd300_coco.py | 8 + configs/mmdet/pisa/pisa_ssd512_coco.py | 8 + configs/mmdet/point_rend/README.md | 33 + configs/mmdet/point_rend/metafile.yml | 54 + ...oint_rend_r50_caffe_fpn_mstrain_1x_coco.py | 44 + ...oint_rend_r50_caffe_fpn_mstrain_3x_coco.py | 4 + configs/mmdet/pvt/README.md | 57 + configs/mmdet/pvt/metafile.yml | 243 +++++ .../mmdet/pvt/retinanet_pvt-l_fpn_1x_coco.py | 7 + 
.../mmdet/pvt/retinanet_pvt-m_fpn_1x_coco.py | 6 + .../mmdet/pvt/retinanet_pvt-s_fpn_1x_coco.py | 6 + .../mmdet/pvt/retinanet_pvt-t_fpn_1x_coco.py | 16 + .../pvt/retinanet_pvtv2-b0_fpn_1x_coco.py | 17 + .../pvt/retinanet_pvtv2-b1_fpn_1x_coco.py | 7 + .../pvt/retinanet_pvtv2-b2_fpn_1x_coco.py | 8 + .../pvt/retinanet_pvtv2-b3_fpn_1x_coco.py | 8 + .../pvt/retinanet_pvtv2-b4_fpn_1x_coco.py | 18 + .../pvt/retinanet_pvtv2-b5_fpn_1x_coco.py | 19 + configs/mmdet/queryinst/README.md | 36 + configs/mmdet/queryinst/metafile.yml | 100 ++ ..._proposals_crop_mstrain_480-800_3x_coco.py | 7 + ...ryinst_r101_fpn_mstrain_480-800_3x_coco.py | 7 + .../queryinst/queryinst_r50_fpn_1x_coco.py | 138 +++ ..._proposals_crop_mstrain_480-800_3x_coco.py | 54 + ...eryinst_r50_fpn_mstrain_480-800_3x_coco.py | 23 + configs/mmdet/regnet/README.md | 122 +++ ..._rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py | 17 + ..._rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py | 63 ++ ..._rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py | 17 + ...sk_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py | 17 + ..._rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py | 17 + ..._rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py | 17 + .../faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py | 57 + .../faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py | 3 + ..._rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py | 61 ++ ..._rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py | 17 + ...er_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py | 17 + ..._rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py | 17 + ..._regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py | 26 + .../mask_rcnn_regnetx-12GF_fpn_1x_coco.py | 17 + .../mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py | 58 ++ ..._regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py | 7 + ..._rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py | 66 ++ ..._regnetx-400MF_fpn_mstrain-poly_3x_coco.py | 26 + .../mask_rcnn_regnetx-4GF_fpn_1x_coco.py | 17 + ...nn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py | 26 + .../mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py | 17 + ..._regnetx-800MF_fpn_mstrain-poly_3x_coco.py | 26 + 
.../mask_rcnn_regnetx-8GF_fpn_1x_coco.py | 17 + configs/mmdet/regnet/metafile.yml | 797 ++++++++++++++ .../retinanet_regnetx-1.6GF_fpn_1x_coco.py | 17 + .../retinanet_regnetx-3.2GF_fpn_1x_coco.py | 59 ++ .../retinanet_regnetx-800MF_fpn_1x_coco.py | 17 + configs/mmdet/reppoints/README.md | 59 ++ ...50_grid_center_fpn_gn-neck+head_1x_coco.py | 2 + .../bbox_r50_grid_fpn_gn-neck+head_1x_coco.py | 13 + configs/mmdet/reppoints/metafile.yml | 181 ++++ configs/mmdet/reppoints/reppoints.png | Bin 0 -> 1198109 bytes ...nts_minmax_r50_fpn_gn-neck+head_1x_coco.py | 2 + ...01_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py | 8 + ...ts_moment_r101_fpn_gn-neck+head_2x_coco.py | 6 + .../reppoints_moment_r50_fpn_1x_coco.py | 67 ++ ...nts_moment_r50_fpn_gn-neck+head_1x_coco.py | 4 + ...nts_moment_r50_fpn_gn-neck+head_2x_coco.py | 3 + ...01_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py | 16 + ...ial_minmax_r50_fpn_gn-neck+head_1x_coco.py | 2 + configs/mmdet/res2net/README.md | 77 ++ .../cascade_mask_rcnn_r2_101_fpn_20e_coco.py | 10 + .../cascade_rcnn_r2_101_fpn_20e_coco.py | 10 + .../res2net/faster_rcnn_r2_101_fpn_2x_coco.py | 10 + .../mmdet/res2net/htc_r2_101_fpn_20e_coco.py | 13 + .../res2net/mask_rcnn_r2_101_fpn_2x_coco.py | 10 + configs/mmdet/res2net/metafile.yml | 146 +++ configs/mmdet/resnest/README.md | 54 + ...pn_syncbn-backbone+head_mstrain_1x_coco.py | 7 + ...pn_syncbn-backbone+head_mstrain_1x_coco.py | 118 +++ ...cbn-backbone+head_mstrain-range_1x_coco.py | 7 + ...cbn-backbone+head_mstrain-range_1x_coco.py | 116 +++ ...cbn-backbone+head_mstrain-range_1x_coco.py | 7 + ...cbn-backbone+head_mstrain-range_1x_coco.py | 62 ++ ...pn_syncbn-backbone+head_mstrain_1x_coco.py | 7 + ...pn_syncbn-backbone+head_mstrain_1x_coco.py | 64 ++ configs/mmdet/resnest/metafile.yml | 230 +++++ configs/mmdet/resnet_strikes_back/README.md | 40 + ..._mask_rcnn_r50_fpn_rsb-pretrain_1x_coco.py | 18 + ...aster_rcnn_r50_fpn_rsb-pretrain_1x_coco.py | 18 + .../mask_rcnn_r50_fpn_rsb-pretrain_1x_coco.py | 18 + 
.../mmdet/resnet_strikes_back/metafile.yml | 116 +++ .../retinanet_r50_fpn_rsb-pretrain_1x_coco.py | 18 + configs/mmdet/retinanet/README.md | 53 + configs/mmdet/retinanet/metafile.yml | 312 ++++++ .../retinanet_r101_caffe_fpn_1x_coco.py | 7 + ...etinanet_r101_caffe_fpn_mstrain_3x_coco.py | 7 + .../retinanet/retinanet_r101_fpn_1x_coco.py | 6 + .../retinanet/retinanet_r101_fpn_2x_coco.py | 6 + ...inanet_r101_fpn_mstrain_640-800_3x_coco.py | 6 + .../retinanet_r18_fpn_1x8_1x_coco.py | 23 + .../retinanet/retinanet_r18_fpn_1x_coco.py | 18 + .../retinanet_r50_caffe_fpn_1x_coco.py | 41 + ...retinanet_r50_caffe_fpn_mstrain_1x_coco.py | 46 + ...retinanet_r50_caffe_fpn_mstrain_2x_coco.py | 4 + ...retinanet_r50_caffe_fpn_mstrain_3x_coco.py | 4 + .../retinanet/retinanet_r50_fpn_1x_coco.py | 7 + .../retinanet/retinanet_r50_fpn_2x_coco.py | 4 + .../retinanet/retinanet_r50_fpn_90k_coco.py | 15 + .../retinanet_r50_fpn_fp16_1x_coco.py | 3 + ...tinanet_r50_fpn_mstrain_640-800_3x_coco.py | 5 + .../retinanet_x101_32x4d_fpn_1x_coco.py | 14 + .../retinanet_x101_32x4d_fpn_2x_coco.py | 14 + .../retinanet_x101_64x4d_fpn_1x_coco.py | 14 + .../retinanet_x101_64x4d_fpn_2x_coco.py | 14 + ..._x101_64x4d_fpn_mstrain_640-800_3x_coco.py | 8 + configs/mmdet/rpn/README.md | 39 + .../mmdet/rpn/rpn_r101_caffe_fpn_1x_coco.py | 7 + configs/mmdet/rpn/rpn_r101_fpn_1x_coco.py | 6 + configs/mmdet/rpn/rpn_r101_fpn_2x_coco.py | 6 + configs/mmdet/rpn/rpn_r50_caffe_c4_1x_coco.py | 38 + .../mmdet/rpn/rpn_r50_caffe_fpn_1x_coco.py | 41 + configs/mmdet/rpn/rpn_r50_fpn_1x_coco.py | 18 + configs/mmdet/rpn/rpn_r50_fpn_2x_coco.py | 5 + .../mmdet/rpn/rpn_x101_32x4d_fpn_1x_coco.py | 14 + .../mmdet/rpn/rpn_x101_32x4d_fpn_2x_coco.py | 14 + .../mmdet/rpn/rpn_x101_64x4d_fpn_1x_coco.py | 14 + .../mmdet/rpn/rpn_x101_64x4d_fpn_2x_coco.py | 14 + configs/mmdet/sabl/README.md | 47 + configs/mmdet/sabl/metafile.yml | 140 +++ .../sabl_cascade_rcnn_r101_fpn_1x_coco.py | 90 ++ .../sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py | 86 ++ 
.../sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py | 38 + .../sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py | 34 + .../sabl/sabl_retinanet_r101_fpn_1x_coco.py | 54 + .../sabl_retinanet_r101_fpn_gn_1x_coco.py | 56 + ...etinanet_r101_fpn_gn_2x_ms_480_960_coco.py | 73 ++ ...etinanet_r101_fpn_gn_2x_ms_640_800_coco.py | 73 ++ .../sabl/sabl_retinanet_r50_fpn_1x_coco.py | 50 + .../sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py | 52 + configs/mmdet/scnet/README.md | 63 ++ configs/mmdet/scnet/metafile.yml | 116 +++ .../mmdet/scnet/scnet_r101_fpn_20e_coco.py | 6 + configs/mmdet/scnet/scnet_r50_fpn_1x_coco.py | 136 +++ configs/mmdet/scnet/scnet_r50_fpn_20e_coco.py | 4 + .../scnet/scnet_x101_64x4d_fpn_20e_coco.py | 15 + .../scnet_x101_64x4d_fpn_8x1_20e_coco.py | 8 + configs/mmdet/scratch/README.md | 35 + ...ter_rcnn_r50_fpn_gn-all_scratch_6x_coco.py | 24 + ...ask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py | 25 + configs/mmdet/scratch/metafile.yml | 48 + configs/mmdet/seesaw_loss/README.md | 48 + ...n_random_seesaw_loss_mstrain_2x_lvis_v1.py | 132 +++ ...saw_loss_normed_mask_mstrain_2x_lvis_v1.py | 5 + ...mple1e-3_seesaw_loss_mstrain_2x_lvis_v1.py | 98 ++ ...saw_loss_normed_mask_mstrain_2x_lvis_v1.py | 5 + ...n_random_seesaw_loss_mstrain_2x_lvis_v1.py | 6 + ...saw_loss_normed_mask_mstrain_2x_lvis_v1.py | 6 + ...mple1e-3_seesaw_loss_mstrain_2x_lvis_v1.py | 6 + ...saw_loss_normed_mask_mstrain_2x_lvis_v1.py | 6 + ...n_random_seesaw_loss_mstrain_2x_lvis_v1.py | 75 ++ ...saw_loss_normed_mask_mstrain_2x_lvis_v1.py | 5 + ...mple1e-3_seesaw_loss_mstrain_2x_lvis_v1.py | 41 + ...saw_loss_normed_mask_mstrain_2x_lvis_v1.py | 5 + configs/mmdet/seesaw_loss/metafile.yml | 203 ++++ configs/mmdet/selfsup_pretrain/README.md | 109 ++ ...sk_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py | 13 + ...rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py | 32 + ...mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py | 13 + ...k_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py | 32 + configs/mmdet/simple_copy_paste/README.md | 38 + 
...syncbn-all_rpn-2conv_ssj_32x2_270k_coco.py | 20 + ..._syncbn-all_rpn-2conv_ssj_32x2_90k_coco.py | 7 + ...bn-all_rpn-2conv_ssj_scp_32x2_270k_coco.py | 20 + ...cbn-all_rpn-2conv_ssj_scp_32x2_90k_coco.py | 7 + configs/mmdet/solo/README.md | 54 + .../decoupled_solo_light_r50_fpn_3x_coco.py | 63 ++ .../solo/decoupled_solo_r50_fpn_1x_coco.py | 28 + .../solo/decoupled_solo_r50_fpn_3x_coco.py | 25 + configs/mmdet/solo/metafile.yml | 115 +++ configs/mmdet/solo/solo_r50_fpn_1x_coco.py | 53 + configs/mmdet/solo/solo_r50_fpn_3x_coco.py | 28 + configs/mmdet/sparse_rcnn/README.md | 38 + configs/mmdet/sparse_rcnn/metafile.yml | 80 ++ ..._proposals_crop_mstrain_480-800_3x_coco.py | 7 + ...e_rcnn_r101_fpn_mstrain_480-800_3x_coco.py | 7 + .../sparse_rcnn_r50_fpn_1x_coco.py | 95 ++ ..._proposals_crop_mstrain_480-800_3x_coco.py | 52 + ...se_rcnn_r50_fpn_mstrain_480-800_3x_coco.py | 23 + configs/mmdet/ssd/README.md | 62 ++ configs/mmdet/ssd/metafile.yml | 78 ++ configs/mmdet/ssd/ssd300_coco.py | 71 ++ configs/mmdet/ssd/ssd512_coco.py | 84 ++ .../ssdlite_mobilenetv2_scratch_600e_coco.py | 150 +++ configs/mmdet/strong_baselines/README.md | 20 + ..._fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py | 80 ++ ...syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py | 2 + ..._fpn_syncbn-all_rpn-2conv_lsj_400e_coco.py | 6 + ..._fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py | 22 + ...syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py | 3 + ...0_fpn_syncbn-all_rpn-2conv_lsj_50e_coco.py | 5 + configs/mmdet/swin/README.md | 40 + ...n_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py | 6 + .../mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py | 42 + ...n_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py | 3 + ...k_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py | 91 ++ configs/mmdet/swin/metafile.yml | 120 +++ .../retinanet_swin-t-p4-w7_fpn_1x_coco.py | 30 + configs/mmdet/timm_example/README.md | 62 ++ ...inanet_timm_efficientnet_b1_fpn_1x_coco.py | 20 + .../retinanet_timm_tv_resnet50_fpn_1x_coco.py | 19 + configs/mmdet/tood/README.md | 40 + 
configs/mmdet/tood/metafile.yml | 95 ++ ...od_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py | 7 + .../tood/tood_r101_fpn_mstrain_2x_coco.py | 7 + configs/mmdet/tood/tood_r50_fpn_1x_coco.py | 74 ++ .../tood/tood_r50_fpn_anchor_based_1x_coco.py | 2 + .../tood/tood_r50_fpn_mstrain_2x_coco.py | 22 + ...1_64x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py | 7 + .../tood_x101_64x4d_fpn_mstrain_2x_coco.py | 16 + configs/mmdet/tridentnet/README.md | 38 + configs/mmdet/tridentnet/metafile.yml | 55 + .../tridentnet_r50_caffe_1x_coco.py | 55 + .../tridentnet_r50_caffe_mstrain_1x_coco.py | 22 + .../tridentnet_r50_caffe_mstrain_3x_coco.py | 4 + configs/mmdet/vfnet/README.md | 48 + configs/mmdet/vfnet/metafile.yml | 116 +++ configs/mmdet/vfnet/vfnet_r101_fpn_1x_coco.py | 6 + configs/mmdet/vfnet/vfnet_r101_fpn_2x_coco.py | 8 + ...t_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py | 15 + .../vfnet/vfnet_r101_fpn_mstrain_2x_coco.py | 6 + ...r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py | 18 + .../vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py | 16 + configs/mmdet/vfnet/vfnet_r50_fpn_1x_coco.py | 107 ++ ...et_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py | 6 + .../vfnet/vfnet_r50_fpn_mstrain_2x_coco.py | 39 + ..._32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py | 17 + .../vfnet_x101_32x4d_fpn_mstrain_2x_coco.py | 15 + ..._64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py | 17 + .../vfnet_x101_64x4d_fpn_mstrain_2x_coco.py | 15 + configs/mmdet/wider_face/README.md | 57 + configs/mmdet/wider_face/ssd300_wider_face.py | 18 + configs/mmdet/yolact/README.md | 74 ++ configs/mmdet/yolact/metafile.yml | 78 ++ configs/mmdet/yolact/yolact_r101_1x8_coco.py | 7 + configs/mmdet/yolact/yolact_r50_1x8_coco.py | 165 +++ configs/mmdet/yolact/yolact_r50_8x8_coco.py | 16 + configs/mmdet/yolo/README.md | 55 + configs/mmdet/yolo/metafile.yml | 124 +++ .../mmdet/yolo/yolov3_d53_320_273e_coco.py | 42 + .../yolov3_d53_fp16_mstrain-608_273e_coco.py | 3 + .../yolo/yolov3_d53_mstrain-416_273e_coco.py | 42 + .../yolo/yolov3_d53_mstrain-608_273e_coco.py | 132 +++ 
.../yolo/yolov3_mobilenetv2_320_300e_coco.py | 53 + ...olov3_mobilenetv2_mstrain-416_300e_coco.py | 142 +++ configs/mmdet/yolof/README.md | 35 + configs/mmdet/yolof/metafile.yml | 32 + .../mmdet/yolof/yolof_r50_c5_8x8_1x_coco.py | 110 ++ .../yolof/yolof_r50_c5_8x8_iter-1x_coco.py | 14 + configs/mmdet/yolox/README.md | 39 + configs/mmdet/yolox/metafile.yml | 70 ++ configs/mmdet/yolox/yolox_l_8x8_300e_coco.py | 8 + configs/mmdet/yolox/yolox_m_8x8_300e_coco.py | 8 + .../mmdet/yolox/yolox_nano_8x8_300e_coco.py | 11 + configs/mmdet/yolox/yolox_s_8x8_300e_coco.py | 165 +++ .../mmdet/yolox/yolox_tiny_8x8_300e_coco.py | 58 ++ configs/mmdet/yolox/yolox_x_8x8_300e_coco.py | 8 + configs/mmtune/_base_/space/mmdet_model.py | 339 ++++++ configs/mmtune/mmdet_asynchb_nevergrad_pso.py | 18 + mmtune/mm/tasks/__init__.py | 3 +- mmtune/mm/tasks/mmdet.py | 204 ++++ 780 files changed, 34899 insertions(+), 1 deletion(-) create mode 100644 configs/mmdet/_base_/datasets/cityscapes_detection.py create mode 100644 configs/mmdet/_base_/datasets/cityscapes_instance.py create mode 100644 configs/mmdet/_base_/datasets/coco_detection.py create mode 100644 configs/mmdet/_base_/datasets/coco_instance.py create mode 100644 configs/mmdet/_base_/datasets/coco_instance_semantic.py create mode 100644 configs/mmdet/_base_/datasets/coco_panoptic.py create mode 100644 configs/mmdet/_base_/datasets/deepfashion.py create mode 100644 configs/mmdet/_base_/datasets/lvis_v0.5_instance.py create mode 100644 configs/mmdet/_base_/datasets/lvis_v1_instance.py create mode 100644 configs/mmdet/_base_/datasets/openimages_detection.py create mode 100644 configs/mmdet/_base_/datasets/voc0712.py create mode 100644 configs/mmdet/_base_/datasets/wider_face.py create mode 100644 configs/mmdet/_base_/default_runtime.py create mode 100644 configs/mmdet/_base_/models/cascade_mask_rcnn_r50_fpn.py create mode 100644 configs/mmdet/_base_/models/cascade_rcnn_r50_fpn.py create mode 100644 
configs/mmdet/_base_/models/fast_rcnn_r50_fpn.py create mode 100644 configs/mmdet/_base_/models/faster_rcnn_r50_caffe_c4.py create mode 100644 configs/mmdet/_base_/models/faster_rcnn_r50_caffe_dc5.py create mode 100644 configs/mmdet/_base_/models/faster_rcnn_r50_fpn.py create mode 100644 configs/mmdet/_base_/models/mask_rcnn_r50_caffe_c4.py create mode 100644 configs/mmdet/_base_/models/mask_rcnn_r50_fpn.py create mode 100644 configs/mmdet/_base_/models/retinanet_r50_fpn.py create mode 100644 configs/mmdet/_base_/models/rpn_r50_caffe_c4.py create mode 100644 configs/mmdet/_base_/models/rpn_r50_fpn.py create mode 100644 configs/mmdet/_base_/models/ssd300.py create mode 100644 configs/mmdet/_base_/schedules/schedule_1x.py create mode 100644 configs/mmdet/_base_/schedules/schedule_20e.py create mode 100644 configs/mmdet/_base_/schedules/schedule_2x.py create mode 100644 configs/mmdet/albu_example/README.md create mode 100644 configs/mmdet/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py create mode 100644 configs/mmdet/atss/README.md create mode 100644 configs/mmdet/atss/atss_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/atss/atss_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/atss/metafile.yml create mode 100644 configs/mmdet/autoassign/README.md create mode 100644 configs/mmdet/autoassign/autoassign_r50_fpn_8x2_1x_coco.py create mode 100644 configs/mmdet/autoassign/metafile.yml create mode 100644 configs/mmdet/carafe/README.md create mode 100644 configs/mmdet/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py create mode 100644 configs/mmdet/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py create mode 100644 configs/mmdet/carafe/metafile.yml create mode 100644 configs/mmdet/cascade_rcnn/README.md create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py 
create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py create mode 100644 
configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py create mode 100644 configs/mmdet/cascade_rcnn/metafile.yml create mode 100644 configs/mmdet/cascade_rpn/README.md create mode 100644 configs/mmdet/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/cascade_rpn/metafile.yml create mode 100644 configs/mmdet/centernet/README.md create mode 100644 configs/mmdet/centernet/centernet_resnet18_140e_coco.py create mode 100644 configs/mmdet/centernet/centernet_resnet18_dcnv2_140e_coco.py create mode 100644 configs/mmdet/centernet/metafile.yml create mode 100644 configs/mmdet/centripetalnet/README.md create mode 100644 configs/mmdet/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py create mode 100644 configs/mmdet/centripetalnet/metafile.yml create mode 100644 configs/mmdet/cityscapes/README.md create mode 100644 configs/mmdet/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py create mode 100644 configs/mmdet/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py create mode 100644 configs/mmdet/common/lsj_100e_coco_instance.py create mode 100644 configs/mmdet/common/mstrain-poly_3x_coco_instance.py create mode 100644 configs/mmdet/common/mstrain_3x_coco.py create mode 100644 configs/mmdet/common/mstrain_3x_coco_instance.py create mode 100644 configs/mmdet/common/ssj_270k_coco_instance.py create mode 100644 configs/mmdet/common/ssj_scp_270k_coco_instance.py create mode 100644 configs/mmdet/cornernet/README.md create mode 100644 configs/mmdet/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py create mode 100644 configs/mmdet/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py create mode 100644 configs/mmdet/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py 
create mode 100644 configs/mmdet/cornernet/metafile.yml create mode 100644 configs/mmdet/dcn/README.md create mode 100644 configs/mmdet/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py create mode 100644 configs/mmdet/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcn/metafile.yml create mode 100644 configs/mmdet/dcnv2/README.md create mode 100644 configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py create mode 100644 configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdpool_1x_coco.py create mode 100644 configs/mmdet/dcnv2/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcnv2/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/dcnv2/metafile.yml create mode 100644 configs/mmdet/deepfashion/README.md create mode 100644 configs/mmdet/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py create mode 100644 configs/mmdet/deformable_detr/README.md create mode 100644 configs/mmdet/deformable_detr/deformable_detr_r50_16x2_50e_coco.py create mode 100644 
configs/mmdet/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py create mode 100644 configs/mmdet/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py create mode 100644 configs/mmdet/deformable_detr/metafile.yml create mode 100644 configs/mmdet/detectors/README.md create mode 100644 configs/mmdet/detectors/cascade_rcnn_r50_rfp_1x_coco.py create mode 100644 configs/mmdet/detectors/cascade_rcnn_r50_sac_1x_coco.py create mode 100644 configs/mmdet/detectors/detectors_cascade_rcnn_r50_1x_coco.py create mode 100644 configs/mmdet/detectors/detectors_htc_r101_20e_coco.py create mode 100644 configs/mmdet/detectors/detectors_htc_r50_1x_coco.py create mode 100644 configs/mmdet/detectors/htc_r50_rfp_1x_coco.py create mode 100644 configs/mmdet/detectors/htc_r50_sac_1x_coco.py create mode 100644 configs/mmdet/detectors/metafile.yml create mode 100644 configs/mmdet/detr/README.md create mode 100644 configs/mmdet/detr/detr_r50_8x2_150e_coco.py create mode 100644 configs/mmdet/detr/metafile.yml create mode 100644 configs/mmdet/double_heads/README.md create mode 100644 configs/mmdet/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/double_heads/metafile.yml create mode 100644 configs/mmdet/dyhead/README.md create mode 100644 configs/mmdet/dyhead/atss_r50_caffe_fpn_dyhead_1x_coco.py create mode 100644 configs/mmdet/dyhead/atss_r50_fpn_dyhead_1x_coco.py create mode 100644 configs/mmdet/dyhead/metafile.yml create mode 100644 configs/mmdet/dynamic_rcnn/README.md create mode 100644 configs/mmdet/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/dynamic_rcnn/metafile.yml create mode 100644 configs/mmdet/efficientnet/README.md create mode 100644 configs/mmdet/efficientnet/metafile.yml create mode 100644 configs/mmdet/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco.py create mode 100644 configs/mmdet/empirical_attention/README.md create mode 100644 
configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py create mode 100644 configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py create mode 100644 configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py create mode 100644 configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py create mode 100644 configs/mmdet/empirical_attention/metafile.yml create mode 100644 configs/mmdet/fast_rcnn/README.md create mode 100644 configs/mmdet/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py create mode 100644 configs/mmdet/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py create mode 100644 configs/mmdet/faster_rcnn/README.md create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 
configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py create mode 100644 
configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/faster_rcnn/metafile.yml create mode 100644 configs/mmdet/fcos/README.md create mode 100644 configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py create mode 100644 configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py create mode 100644 configs/mmdet/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py create mode 100644 configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py create mode 100644 configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py create mode 100644 configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py create mode 100644 configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py create mode 100644 configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py create mode 100644 configs/mmdet/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py create mode 100644 configs/mmdet/fcos/metafile.yml create mode 100644 configs/mmdet/foveabox/README.md create mode 100644 configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_r101_fpn_4x4_1x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_r101_fpn_4x4_2x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_r50_fpn_4x4_1x_coco.py create mode 100644 configs/mmdet/foveabox/fovea_r50_fpn_4x4_2x_coco.py create mode 100644 configs/mmdet/foveabox/metafile.yml create mode 100644 configs/mmdet/fpg/README.md create mode 100644 
configs/mmdet/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/metafile.yml create mode 100644 configs/mmdet/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py create mode 100644 configs/mmdet/fpg/retinanet_r50_fpg_crop640_50e_coco.py create mode 100644 configs/mmdet/free_anchor/README.md create mode 100644 configs/mmdet/free_anchor/metafile.yml create mode 100644 configs/mmdet/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/fsaf/README.md create mode 100644 configs/mmdet/fsaf/fsaf_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/fsaf/fsaf_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/fsaf/metafile.yml create mode 100644 configs/mmdet/gcnet/README.md create mode 100644 configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py create mode 100644 configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py create mode 100644 
configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py create mode 100644 configs/mmdet/gcnet/metafile.yml create mode 100644 configs/mmdet/gfl/README.md create mode 100644 configs/mmdet/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/gfl/gfl_r101_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/gfl/gfl_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/gfl/gfl_r50_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/gfl/metafile.yml create mode 100644 configs/mmdet/ghm/README.md create mode 100644 configs/mmdet/ghm/metafile.yml create mode 100644 
configs/mmdet/ghm/retinanet_ghm_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/ghm/retinanet_ghm_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/gn+ws/README.md create mode 100644 configs/mmdet/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py create mode 100644 configs/mmdet/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py create mode 100644 configs/mmdet/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py create mode 100644 configs/mmdet/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py create mode 100644 configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py create mode 100644 configs/mmdet/gn+ws/metafile.yml create mode 100644 configs/mmdet/gn/README.md create mode 100644 configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py create mode 100644 configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py create mode 100644 configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py create mode 100644 configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py create mode 100644 configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py create mode 100644 configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py create mode 100644 configs/mmdet/gn/metafile.yml create mode 100644 configs/mmdet/grid_rcnn/README.md create 
mode 100644 configs/mmdet/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py create mode 100644 configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py create mode 100644 configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py create mode 100644 configs/mmdet/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py create mode 100644 configs/mmdet/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py create mode 100644 configs/mmdet/grid_rcnn/metafile.yml create mode 100644 configs/mmdet/groie/README.md create mode 100644 configs/mmdet/groie/faster_rcnn_r50_fpn_groie_1x_coco.py create mode 100644 configs/mmdet/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py create mode 100644 configs/mmdet/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py create mode 100644 configs/mmdet/groie/mask_rcnn_r50_fpn_groie_1x_coco.py create mode 100644 configs/mmdet/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py create mode 100644 configs/mmdet/groie/metafile.yml create mode 100644 configs/mmdet/guided_anchoring/README.md create mode 100644 configs/mmdet/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_faster_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py create mode 100644 configs/mmdet/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py create mode 100644 
configs/mmdet/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/guided_anchoring/metafile.yml create mode 100644 configs/mmdet/hrnet/README.md create mode 100644 configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py create mode 100644 configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py create mode 100644 configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py create mode 100644 configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py create mode 100644 configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py create mode 100644 configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py create mode 100644 configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py create mode 100644 configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py create mode 100644 configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py create mode 100644 configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py create mode 100644 configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py create mode 100644 configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py create mode 100644 configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py create mode 100644 configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py create mode 100644 configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py create mode 100644 configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py create mode 100644 
configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py create mode 100644 configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py create mode 100644 configs/mmdet/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py create mode 100644 configs/mmdet/hrnet/htc_hrnetv2p_w18_20e_coco.py create mode 100644 configs/mmdet/hrnet/htc_hrnetv2p_w32_20e_coco.py create mode 100644 configs/mmdet/hrnet/htc_hrnetv2p_w40_20e_coco.py create mode 100644 configs/mmdet/hrnet/htc_hrnetv2p_w40_28e_coco.py create mode 100644 configs/mmdet/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py create mode 100644 configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py create mode 100644 configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py create mode 100644 configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py create mode 100644 configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py create mode 100644 configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py create mode 100644 configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py create mode 100644 configs/mmdet/hrnet/metafile.yml create mode 100644 configs/mmdet/htc/README.md create mode 100644 configs/mmdet/htc/htc_r101_fpn_20e_coco.py create mode 100644 configs/mmdet/htc/htc_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/htc/htc_r50_fpn_20e_coco.py create mode 100644 configs/mmdet/htc/htc_without_semantic_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py create mode 100644 configs/mmdet/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py create mode 100644 configs/mmdet/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py create mode 100644 configs/mmdet/htc/metafile.yml create mode 100644 configs/mmdet/instaboost/README.md create mode 100644 configs/mmdet/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py create mode 100644 configs/mmdet/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py create mode 100644 
configs/mmdet/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py create mode 100644 configs/mmdet/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py create mode 100644 configs/mmdet/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py create mode 100644 configs/mmdet/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py create mode 100644 configs/mmdet/instaboost/metafile.yml create mode 100644 configs/mmdet/lad/README.md create mode 100644 configs/mmdet/lad/lad_r101_paa_r50_fpn_coco_1x.py create mode 100644 configs/mmdet/lad/lad_r50_paa_r101_fpn_coco_1x.py create mode 100644 configs/mmdet/lad/metafile.yml create mode 100644 configs/mmdet/ld/README.md create mode 100644 configs/mmdet/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py create mode 100644 configs/mmdet/ld/ld_r18_gflv1_r101_fpn_coco_1x.py create mode 100644 configs/mmdet/ld/ld_r34_gflv1_r101_fpn_coco_1x.py create mode 100644 configs/mmdet/ld/ld_r50_gflv1_r101_fpn_coco_1x.py create mode 100644 configs/mmdet/ld/metafile.yml create mode 100644 configs/mmdet/legacy_1.x/README.md create mode 100644 configs/mmdet/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py create mode 100644 configs/mmdet/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py create mode 100644 configs/mmdet/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py create mode 100644 configs/mmdet/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py create mode 100644 configs/mmdet/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py create mode 100644 configs/mmdet/legacy_1.x/ssd300_coco_v1.py create mode 100644 configs/mmdet/libra_rcnn/README.md create mode 100644 configs/mmdet/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py create mode 
100644 configs/mmdet/libra_rcnn/metafile.yml create mode 100644 configs/mmdet/lvis/README.md create mode 100644 configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py create mode 100644 configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py create mode 100644 configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py create mode 100644 configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py create mode 100644 configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py create mode 100644 configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py create mode 100644 configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py create mode 100644 configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py create mode 100644 configs/mmdet/mask2former/README.md create mode 100644 configs/mmdet/mask2former/mask2former_r101_lsj_8x2_50e_coco.py create mode 100644 configs/mmdet/mask2former/mask2former_r50_lsj_8x2_50e_coco.py create mode 100644 configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py create mode 100644 configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py create mode 100644 configs/mmdet/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py create mode 100644 configs/mmdet/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco.py create mode 100644 configs/mmdet/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py create mode 100644 configs/mmdet/mask2former/metafile.yml create mode 100644 configs/mmdet/mask_rcnn/README.md create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py create mode 100644 
configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/mask_rcnn/metafile.yml create mode 100644 configs/mmdet/maskformer/README.md create mode 100644 
configs/mmdet/maskformer/maskformer_r50_mstrain_16x1_75e_coco.py create mode 100644 configs/mmdet/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco.py create mode 100644 configs/mmdet/maskformer/metafile.yml create mode 100644 configs/mmdet/ms_rcnn/README.md create mode 100644 configs/mmdet/ms_rcnn/metafile.yml create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/nas_fcos/README.md create mode 100644 configs/mmdet/nas_fcos/metafile.yml create mode 100644 configs/mmdet/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py create mode 100644 configs/mmdet/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py create mode 100644 configs/mmdet/nas_fpn/README.md create mode 100644 configs/mmdet/nas_fpn/metafile.yml create mode 100644 configs/mmdet/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py create mode 100644 configs/mmdet/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py create mode 100644 configs/mmdet/openimages/README.md create mode 100644 configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages.py create mode 100644 configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py create mode 100644 configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages.py create mode 100644 configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge.py create mode 100644 configs/mmdet/openimages/metafile.yml create mode 100644 
configs/mmdet/openimages/retinanet_r50_fpn_32x2_1x_openimages.py create mode 100644 configs/mmdet/openimages/ssd300_32x8_36e_openimages.py create mode 100644 configs/mmdet/paa/README.md create mode 100644 configs/mmdet/paa/metafile.yml create mode 100644 configs/mmdet/paa/paa_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/paa/paa_r101_fpn_2x_coco.py create mode 100644 configs/mmdet/paa/paa_r101_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/paa/paa_r50_fpn_1.5x_coco.py create mode 100644 configs/mmdet/paa/paa_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/paa/paa_r50_fpn_2x_coco.py create mode 100644 configs/mmdet/paa/paa_r50_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/pafpn/README.md create mode 100644 configs/mmdet/pafpn/faster_rcnn_r50_pafpn_1x_coco.py create mode 100644 configs/mmdet/pafpn/metafile.yml create mode 100644 configs/mmdet/panoptic_fpn/README.md create mode 100644 configs/mmdet/panoptic_fpn/metafile.yml create mode 100644 configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/pascal_voc/README.md create mode 100644 configs/mmdet/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712.py create mode 100644 configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py create mode 100644 configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py create mode 100644 configs/mmdet/pascal_voc/retinanet_r50_fpn_1x_voc0712.py create mode 100644 configs/mmdet/pascal_voc/ssd300_voc0712.py create mode 100644 configs/mmdet/pascal_voc/ssd512_voc0712.py create mode 100644 configs/mmdet/pisa/README.md create mode 100644 configs/mmdet/pisa/metafile.yml create mode 100644 configs/mmdet/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py create mode 100644 
configs/mmdet/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/pisa/pisa_retinanet_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/pisa/pisa_ssd300_coco.py create mode 100644 configs/mmdet/pisa/pisa_ssd512_coco.py create mode 100644 configs/mmdet/point_rend/README.md create mode 100644 configs/mmdet/point_rend/metafile.yml create mode 100644 configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py create mode 100644 configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/pvt/README.md create mode 100644 configs/mmdet/pvt/metafile.yml create mode 100644 configs/mmdet/pvt/retinanet_pvt-l_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvt-m_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvt-s_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvt-t_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py create mode 100644 configs/mmdet/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py create mode 100644 configs/mmdet/queryinst/README.md create mode 100644 configs/mmdet/queryinst/metafile.yml create mode 100644 configs/mmdet/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/queryinst/queryinst_r50_fpn_1x_coco.py create mode 100644 
configs/mmdet/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/regnet/README.md create mode 100644 configs/mmdet/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py create 
mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py create mode 100644 configs/mmdet/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/metafile.yml create mode 100644 configs/mmdet/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py create mode 100644 configs/mmdet/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py create mode 100644 configs/mmdet/reppoints/README.md create mode 100644 configs/mmdet/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py create mode 100644 configs/mmdet/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py create mode 100644 configs/mmdet/reppoints/metafile.yml create mode 100644 configs/mmdet/reppoints/reppoints.png create mode 100644 configs/mmdet/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_moment_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py create mode 100644 configs/mmdet/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py create mode 100644 configs/mmdet/res2net/README.md create mode 100644 configs/mmdet/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py create mode 100644 configs/mmdet/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py create mode 100644 configs/mmdet/res2net/faster_rcnn_r2_101_fpn_2x_coco.py create mode 100644 configs/mmdet/res2net/htc_r2_101_fpn_20e_coco.py create mode 
100644 configs/mmdet/res2net/mask_rcnn_r2_101_fpn_2x_coco.py create mode 100644 configs/mmdet/res2net/metafile.yml create mode 100644 configs/mmdet/resnest/README.md create mode 100644 configs/mmdet/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py create mode 100644 configs/mmdet/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py create mode 100644 configs/mmdet/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py create mode 100644 configs/mmdet/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py create mode 100644 configs/mmdet/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py create mode 100644 configs/mmdet/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py create mode 100644 configs/mmdet/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py create mode 100644 configs/mmdet/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py create mode 100644 configs/mmdet/resnest/metafile.yml create mode 100644 configs/mmdet/resnet_strikes_back/README.md create mode 100644 configs/mmdet/resnet_strikes_back/cascade_mask_rcnn_r50_fpn_rsb-pretrain_1x_coco.py create mode 100644 configs/mmdet/resnet_strikes_back/faster_rcnn_r50_fpn_rsb-pretrain_1x_coco.py create mode 100644 configs/mmdet/resnet_strikes_back/mask_rcnn_r50_fpn_rsb-pretrain_1x_coco.py create mode 100644 configs/mmdet/resnet_strikes_back/metafile.yml create mode 100644 configs/mmdet/resnet_strikes_back/retinanet_r50_fpn_rsb-pretrain_1x_coco.py create mode 100644 configs/mmdet/retinanet/README.md create mode 100644 configs/mmdet/retinanet/metafile.yml create mode 100644 configs/mmdet/retinanet/retinanet_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r101_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r101_fpn_1x_coco.py create mode 100644 
configs/mmdet/retinanet/retinanet_r101_fpn_2x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r101_fpn_mstrain_640-800_3x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r18_fpn_1x8_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r18_fpn_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_fpn_2x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_fpn_90k_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_fpn_fp16_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_r50_fpn_mstrain_640-800_3x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/retinanet/retinanet_x101_64x4d_fpn_mstrain_640-800_3x_coco.py create mode 100644 configs/mmdet/rpn/README.md create mode 100644 configs/mmdet/rpn/rpn_r101_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/rpn/rpn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/rpn/rpn_r101_fpn_2x_coco.py create mode 100644 configs/mmdet/rpn/rpn_r50_caffe_c4_1x_coco.py create mode 100644 configs/mmdet/rpn/rpn_r50_caffe_fpn_1x_coco.py create mode 100644 configs/mmdet/rpn/rpn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/rpn/rpn_r50_fpn_2x_coco.py create mode 100644 configs/mmdet/rpn/rpn_x101_32x4d_fpn_1x_coco.py create mode 100644 
configs/mmdet/rpn/rpn_x101_32x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/rpn/rpn_x101_64x4d_fpn_1x_coco.py create mode 100644 configs/mmdet/rpn/rpn_x101_64x4d_fpn_2x_coco.py create mode 100644 configs/mmdet/sabl/README.md create mode 100644 configs/mmdet/sabl/metafile.yml create mode 100644 configs/mmdet/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_retinanet_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py create mode 100644 configs/mmdet/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py create mode 100644 configs/mmdet/sabl/sabl_retinanet_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py create mode 100644 configs/mmdet/scnet/README.md create mode 100644 configs/mmdet/scnet/metafile.yml create mode 100644 configs/mmdet/scnet/scnet_r101_fpn_20e_coco.py create mode 100644 configs/mmdet/scnet/scnet_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/scnet/scnet_r50_fpn_20e_coco.py create mode 100644 configs/mmdet/scnet/scnet_x101_64x4d_fpn_20e_coco.py create mode 100644 configs/mmdet/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py create mode 100644 configs/mmdet/scratch/README.md create mode 100644 configs/mmdet/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py create mode 100644 configs/mmdet/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py create mode 100644 configs/mmdet/scratch/metafile.yml create mode 100644 configs/mmdet/seesaw_loss/README.md create mode 100644 configs/mmdet/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py create mode 100644 
configs/mmdet/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py create mode 100644 configs/mmdet/seesaw_loss/metafile.yml create mode 100644 configs/mmdet/selfsup_pretrain/README.md create mode 100644 configs/mmdet/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py create mode 100644 configs/mmdet/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py create mode 100644 configs/mmdet/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py create mode 100644 configs/mmdet/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py create mode 100644 configs/mmdet/simple_copy_paste/README.md create mode 100644 configs/mmdet/simple_copy_paste/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_ssj_32x2_270k_coco.py create mode 100644 
configs/mmdet/simple_copy_paste/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_ssj_32x2_90k_coco.py create mode 100644 configs/mmdet/simple_copy_paste/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_ssj_scp_32x2_270k_coco.py create mode 100644 configs/mmdet/simple_copy_paste/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_ssj_scp_32x2_90k_coco.py create mode 100644 configs/mmdet/solo/README.md create mode 100644 configs/mmdet/solo/decoupled_solo_light_r50_fpn_3x_coco.py create mode 100644 configs/mmdet/solo/decoupled_solo_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/solo/decoupled_solo_r50_fpn_3x_coco.py create mode 100644 configs/mmdet/solo/metafile.yml create mode 100644 configs/mmdet/solo/solo_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/solo/solo_r50_fpn_3x_coco.py create mode 100644 configs/mmdet/sparse_rcnn/README.md create mode 100644 configs/mmdet/sparse_rcnn/metafile.yml create mode 100644 configs/mmdet/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py create mode 100644 configs/mmdet/ssd/README.md create mode 100644 configs/mmdet/ssd/metafile.yml create mode 100644 configs/mmdet/ssd/ssd300_coco.py create mode 100644 configs/mmdet/ssd/ssd512_coco.py create mode 100644 configs/mmdet/ssd/ssdlite_mobilenetv2_scratch_600e_coco.py create mode 100644 configs/mmdet/strong_baselines/README.md create mode 100644 configs/mmdet/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py create mode 100644 configs/mmdet/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py create mode 100644 
configs/mmdet/strong_baselines/mask_rcnn_r50_caffe_fpn_syncbn-all_rpn-2conv_lsj_400e_coco.py create mode 100644 configs/mmdet/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_coco.py create mode 100644 configs/mmdet/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_100e_fp16_coco.py create mode 100644 configs/mmdet/strong_baselines/mask_rcnn_r50_fpn_syncbn-all_rpn-2conv_lsj_50e_coco.py create mode 100644 configs/mmdet/swin/README.md create mode 100644 configs/mmdet/swin/mask_rcnn_swin-s-p4-w7_fpn_fp16_ms-crop-3x_coco.py create mode 100644 configs/mmdet/swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py create mode 100644 configs/mmdet/swin/mask_rcnn_swin-t-p4-w7_fpn_fp16_ms-crop-3x_coco.py create mode 100644 configs/mmdet/swin/mask_rcnn_swin-t-p4-w7_fpn_ms-crop-3x_coco.py create mode 100644 configs/mmdet/swin/metafile.yml create mode 100644 configs/mmdet/swin/retinanet_swin-t-p4-w7_fpn_1x_coco.py create mode 100644 configs/mmdet/timm_example/README.md create mode 100644 configs/mmdet/timm_example/retinanet_timm_efficientnet_b1_fpn_1x_coco.py create mode 100644 configs/mmdet/timm_example/retinanet_timm_tv_resnet50_fpn_1x_coco.py create mode 100644 configs/mmdet/tood/README.md create mode 100644 configs/mmdet/tood/metafile.yml create mode 100644 configs/mmdet/tood/tood_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/tood/tood_r101_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/tood/tood_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/tood/tood_r50_fpn_anchor_based_1x_coco.py create mode 100644 configs/mmdet/tood/tood_r50_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/tood/tood_x101_64x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/tood/tood_x101_64x4d_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/tridentnet/README.md create mode 100644 configs/mmdet/tridentnet/metafile.yml create mode 100644 configs/mmdet/tridentnet/tridentnet_r50_caffe_1x_coco.py create mode 100644 
configs/mmdet/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py create mode 100644 configs/mmdet/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py create mode 100644 configs/mmdet/vfnet/README.md create mode 100644 configs/mmdet/vfnet/metafile.yml create mode 100644 configs/mmdet/vfnet/vfnet_r101_fpn_1x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r101_fpn_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r50_fpn_1x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py create mode 100644 configs/mmdet/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py create mode 100644 configs/mmdet/wider_face/README.md create mode 100644 configs/mmdet/wider_face/ssd300_wider_face.py create mode 100644 configs/mmdet/yolact/README.md create mode 100644 configs/mmdet/yolact/metafile.yml create mode 100644 configs/mmdet/yolact/yolact_r101_1x8_coco.py create mode 100644 configs/mmdet/yolact/yolact_r50_1x8_coco.py create mode 100644 configs/mmdet/yolact/yolact_r50_8x8_coco.py create mode 100644 configs/mmdet/yolo/README.md create mode 100644 configs/mmdet/yolo/metafile.yml create mode 100644 configs/mmdet/yolo/yolov3_d53_320_273e_coco.py create mode 100644 configs/mmdet/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py create mode 100644 configs/mmdet/yolo/yolov3_d53_mstrain-416_273e_coco.py 
create mode 100644 configs/mmdet/yolo/yolov3_d53_mstrain-608_273e_coco.py create mode 100644 configs/mmdet/yolo/yolov3_mobilenetv2_320_300e_coco.py create mode 100644 configs/mmdet/yolo/yolov3_mobilenetv2_mstrain-416_300e_coco.py create mode 100644 configs/mmdet/yolof/README.md create mode 100644 configs/mmdet/yolof/metafile.yml create mode 100644 configs/mmdet/yolof/yolof_r50_c5_8x8_1x_coco.py create mode 100644 configs/mmdet/yolof/yolof_r50_c5_8x8_iter-1x_coco.py create mode 100644 configs/mmdet/yolox/README.md create mode 100644 configs/mmdet/yolox/metafile.yml create mode 100644 configs/mmdet/yolox/yolox_l_8x8_300e_coco.py create mode 100644 configs/mmdet/yolox/yolox_m_8x8_300e_coco.py create mode 100644 configs/mmdet/yolox/yolox_nano_8x8_300e_coco.py create mode 100644 configs/mmdet/yolox/yolox_s_8x8_300e_coco.py create mode 100644 configs/mmdet/yolox/yolox_tiny_8x8_300e_coco.py create mode 100644 configs/mmdet/yolox/yolox_x_8x8_300e_coco.py create mode 100644 configs/mmtune/_base_/space/mmdet_model.py create mode 100644 configs/mmtune/mmdet_asynchb_nevergrad_pso.py create mode 100644 mmtune/mm/tasks/mmdet.py diff --git a/configs/mmdet/_base_/datasets/cityscapes_detection.py b/configs/mmdet/_base_/datasets/cityscapes_detection.py new file mode 100644 index 00000000..e341b59d --- /dev/null +++ b/configs/mmdet/_base_/datasets/cityscapes_detection.py @@ -0,0 +1,56 @@ +# dataset settings +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] 
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/configs/mmdet/_base_/datasets/cityscapes_instance.py b/configs/mmdet/_base_/datasets/cityscapes_instance.py new file mode 100644 index 00000000..4e3c34e2 --- /dev/null +++ b/configs/mmdet/_base_/datasets/cityscapes_instance.py @@ -0,0 +1,56 @@ +# dataset settings +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/configs/mmdet/_base_/datasets/coco_detection.py b/configs/mmdet/_base_/datasets/coco_detection.py new file mode 100644 index 00000000..149f590b --- /dev/null +++ b/configs/mmdet/_base_/datasets/coco_detection.py @@ -0,0 +1,49 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + 
flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/configs/mmdet/_base_/datasets/coco_instance.py b/configs/mmdet/_base_/datasets/coco_instance.py new file mode 100644 index 00000000..9901a858 --- /dev/null +++ b/configs/mmdet/_base_/datasets/coco_instance.py @@ -0,0 +1,49 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', 
keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/configs/mmdet/_base_/datasets/coco_instance_semantic.py b/configs/mmdet/_base_/datasets/coco_instance_semantic.py new file mode 100644 index 00000000..6c8bf07b --- /dev/null +++ b/configs/mmdet/_base_/datasets/coco_instance_semantic.py @@ -0,0 +1,54 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( 
+ samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/configs/mmdet/_base_/datasets/coco_panoptic.py b/configs/mmdet/_base_/datasets/coco_panoptic.py new file mode 100644 index 00000000..dbade7c0 --- /dev/null +++ b/configs/mmdet/_base_/datasets/coco_panoptic.py @@ -0,0 +1,59 @@ +# dataset settings +dataset_type = 'CocoPanopticDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadPanopticAnnotations', + with_bbox=True, + with_mask=True, + with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 4), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, 
+ train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'annotations/panoptic_train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_val2017.json', + img_prefix=data_root + 'val2017/', + seg_prefix=data_root + 'annotations/panoptic_val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_val2017.json', + img_prefix=data_root + 'val2017/', + seg_prefix=data_root + 'annotations/panoptic_val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['PQ']) diff --git a/configs/mmdet/_base_/datasets/deepfashion.py b/configs/mmdet/_base_/datasets/deepfashion.py new file mode 100644 index 00000000..308b4b2a --- /dev/null +++ b/configs/mmdet/_base_/datasets/deepfashion.py @@ -0,0 +1,53 @@ +# dataset settings +dataset_type = 'DeepFashionDataset' +data_root = 'data/DeepFashion/In-shop/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(750, 1101), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(750, 1101), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + imgs_per_gpu=2, + workers_per_gpu=1, + train=dict( + 
type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=train_pipeline, + data_root=data_root), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/DeepFashion_segmentation_gallery.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root)) +evaluation = dict(interval=5, metric=['bbox', 'segm']) diff --git a/configs/mmdet/_base_/datasets/lvis_v0.5_instance.py b/configs/mmdet/_base_/datasets/lvis_v0.5_instance.py new file mode 100644 index 00000000..207e0053 --- /dev/null +++ b/configs/mmdet/_base_/datasets/lvis_v0.5_instance.py @@ -0,0 +1,24 @@ +# dataset settings +_base_ = 'coco_instance.py' +dataset_type = 'LVISV05Dataset' +data_root = 'data/lvis_v0.5/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_train.json', + img_prefix=data_root + 'train2017/')), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/'), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/')) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/configs/mmdet/_base_/datasets/lvis_v1_instance.py b/configs/mmdet/_base_/datasets/lvis_v1_instance.py new file mode 100644 index 00000000..be791edd --- /dev/null +++ b/configs/mmdet/_base_/datasets/lvis_v1_instance.py @@ -0,0 +1,24 @@ +# dataset settings +_base_ = 'coco_instance.py' +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + 
_delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/configs/mmdet/_base_/datasets/openimages_detection.py b/configs/mmdet/_base_/datasets/openimages_detection.py new file mode 100644 index 00000000..a65d3063 --- /dev/null +++ b/configs/mmdet/_base_/datasets/openimages_detection.py @@ -0,0 +1,65 @@ +# dataset settings +dataset_type = 'OpenImagesDataset' +data_root = 'data/OpenImages/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, denorm_bbox=True), + dict(type='Resize', img_scale=(1024, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1024, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ], + ), +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=0, # workers_per_gpu > 0 may occur out of memory + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/oidv6-train-annotations-bbox.csv', + img_prefix=data_root + 'OpenImages/train/', + label_file=data_root + 
'annotations/class-descriptions-boxable.csv', + hierarchy_file=data_root + + 'annotations/bbox_labels_600_hierarchy.json', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/validation-annotations-bbox.csv', + img_prefix=data_root + 'OpenImages/validation/', + label_file=data_root + 'annotations/class-descriptions-boxable.csv', + hierarchy_file=data_root + + 'annotations/bbox_labels_600_hierarchy.json', + meta_file=data_root + 'annotations/validation-image-metas.pkl', + image_level_ann_file=data_root + + 'annotations/validation-annotations-human-imagelabels-boxable.csv', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/validation-annotations-bbox.csv', + img_prefix=data_root + 'OpenImages/validation/', + label_file=data_root + 'annotations/class-descriptions-boxable.csv', + hierarchy_file=data_root + + 'annotations/bbox_labels_600_hierarchy.json', + meta_file=data_root + 'annotations/validation-image-metas.pkl', + image_level_ann_file=data_root + + 'annotations/validation-annotations-human-imagelabels-boxable.csv', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='mAP') diff --git a/configs/mmdet/_base_/datasets/voc0712.py b/configs/mmdet/_base_/datasets/voc0712.py new file mode 100644 index 00000000..ae09acdd --- /dev/null +++ b/configs/mmdet/_base_/datasets/voc0712.py @@ -0,0 +1,55 @@ +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1000, 600), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] 
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=[ + data_root + 'VOC2007/ImageSets/Main/trainval.txt', + data_root + 'VOC2012/ImageSets/Main/trainval.txt' + ], + img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'], + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='mAP') diff --git a/configs/mmdet/_base_/datasets/wider_face.py b/configs/mmdet/_base_/datasets/wider_face.py new file mode 100644 index 00000000..d1d649be --- /dev/null +++ b/configs/mmdet/_base_/datasets/wider_face.py @@ -0,0 +1,63 @@ +# dataset settings +dataset_type = 'WIDERFaceDataset' +data_root = 'data/WIDERFace/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), 
keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=60, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=2, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'train.txt', + img_prefix=data_root + 'WIDER_train/', + min_size=17, + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline)) diff --git a/configs/mmdet/_base_/default_runtime.py b/configs/mmdet/_base_/default_runtime.py new file mode 100644 index 00000000..5b0b1452 --- /dev/null +++ b/configs/mmdet/_base_/default_runtime.py @@ -0,0 +1,27 @@ +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +custom_hooks = [dict(type='NumClassCheckHook')] + +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = None +workflow = [('train', 1)] + +# disable opencv multithreading to avoid system being overloaded +opencv_num_threads = 0 +# set multi-process start method as `fork` to speed up the training +mp_start_method = 'fork' + +# Default setting for scaling LR automatically +# - `enable` means enable scaling LR automatically +# or not by default. +# - `base_batch_size` = (8 GPUs) x (2 samples per GPU). 
+auto_scale_lr = dict(enable=False, base_batch_size=16) diff --git a/configs/mmdet/_base_/models/cascade_mask_rcnn_r50_fpn.py b/configs/mmdet/_base_/models/cascade_mask_rcnn_r50_fpn.py new file mode 100644 index 00000000..2902ccae --- /dev/null +++ b/configs/mmdet/_base_/models/cascade_mask_rcnn_r50_fpn.py @@ -0,0 +1,196 @@ +# model settings +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, 
+ roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + 
assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/configs/mmdet/_base_/models/cascade_rcnn_r50_fpn.py b/configs/mmdet/_base_/models/cascade_rcnn_r50_fpn.py new file mode 100644 index 00000000..42f74ae7 --- /dev/null +++ b/configs/mmdet/_base_/models/cascade_rcnn_r50_fpn.py @@ -0,0 +1,179 @@ +# model settings +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', 
beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + 
nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/configs/mmdet/_base_/models/fast_rcnn_r50_fpn.py b/configs/mmdet/_base_/models/fast_rcnn_r50_fpn.py new file mode 100644 index 00000000..9982fe09 --- /dev/null +++ b/configs/mmdet/_base_/models/fast_rcnn_r50_fpn.py @@ -0,0 +1,62 @@ +# model settings +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + 
type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/configs/mmdet/_base_/models/faster_rcnn_r50_caffe_c4.py b/configs/mmdet/_base_/models/faster_rcnn_r50_caffe_c4.py new file mode 100644 index 00000000..dbf965af --- /dev/null +++ b/configs/mmdet/_base_/models/faster_rcnn_r50_caffe_c4.py @@ -0,0 +1,117 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + 
target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=6000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + 
nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/configs/mmdet/_base_/models/faster_rcnn_r50_caffe_dc5.py b/configs/mmdet/_base_/models/faster_rcnn_r50_caffe_dc5.py new file mode 100644 index 00000000..a377a6f0 --- /dev/null +++ b/configs/mmdet/_base_/models/faster_rcnn_r50_caffe_dc5.py @@ -0,0 +1,105 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + strides=(1, 2, 2, 1), + dilations=(1, 1, 1, 2), + out_indices=(3, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=2048, + feat_channels=2048, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=2048, + featmap_strides=[16]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=2048, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, 
+ ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms=dict(type='nms', iou_threshold=0.7), + nms_pre=6000, + max_per_img=1000, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/configs/mmdet/_base_/models/faster_rcnn_r50_fpn.py b/configs/mmdet/_base_/models/faster_rcnn_r50_fpn.py new file mode 100644 index 00000000..1ef8e7b2 --- /dev/null +++ b/configs/mmdet/_base_/models/faster_rcnn_r50_fpn.py @@ -0,0 +1,108 @@ +# model settings +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + 
type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) + # soft-nms is also supported for rcnn testing + # e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05) + )) diff --git a/configs/mmdet/_base_/models/mask_rcnn_r50_caffe_c4.py b/configs/mmdet/_base_/models/mask_rcnn_r50_caffe_c4.py new file mode 100644 index 00000000..122202e1 --- /dev/null +++ 
b/configs/mmdet/_base_/models/mask_rcnn_r50_caffe_c4.py @@ -0,0 +1,125 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='MaskRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=None, + mask_head=dict( + type='FCNMaskHead', + num_convs=0, + in_channels=2048, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + 
type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=14, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=6000, + nms=dict(type='nms', iou_threshold=0.7), + max_per_img=1000, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/configs/mmdet/_base_/models/mask_rcnn_r50_fpn.py b/configs/mmdet/_base_/models/mask_rcnn_r50_fpn.py new file mode 100644 index 00000000..d903e55e --- /dev/null +++ b/configs/mmdet/_base_/models/mask_rcnn_r50_fpn.py @@ -0,0 +1,120 @@ +# model settings +model = dict( + type='MaskRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + 
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + 
max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/configs/mmdet/_base_/models/retinanet_r50_fpn.py b/configs/mmdet/_base_/models/retinanet_r50_fpn.py new file mode 100644 index 00000000..56e43fa7 --- /dev/null +++ b/configs/mmdet/_base_/models/retinanet_r50_fpn.py @@ -0,0 +1,60 @@ +# model settings +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/configs/mmdet/_base_/models/rpn_r50_caffe_c4.py b/configs/mmdet/_base_/models/rpn_r50_caffe_c4.py new file mode 100644 index 00000000..8b32ca99 --- /dev/null +++ 
b/configs/mmdet/_base_/models/rpn_r50_caffe_c4.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=None, + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/configs/mmdet/_base_/models/rpn_r50_fpn.py b/configs/mmdet/_base_/models/rpn_r50_fpn.py new file mode 100644 index 00000000..edaf4d4b --- /dev/null +++ b/configs/mmdet/_base_/models/rpn_r50_fpn.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], 
+ out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/configs/mmdet/_base_/models/ssd300.py b/configs/mmdet/_base_/models/ssd300.py new file mode 100644 index 00000000..f17df010 --- /dev/null +++ b/configs/mmdet/_base_/models/ssd300.py @@ -0,0 +1,56 @@ +# model settings +input_size = 300 +model = dict( + type='SingleStageDetector', + backbone=dict( + type='SSDVGG', + depth=16, + with_last_pool=False, + ceil_mode=True, + out_indices=(3, 4), + out_feature_indices=(22, 34), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://vgg16_caffe')), + neck=dict( + type='SSDNeck', + in_channels=(512, 1024), + out_channels=(512, 1024, 512, 256, 256, 256), + level_strides=(2, 2, 1, 1), + level_paddings=(1, 1, 0, 0), + l2_norm_scale=20), + bbox_head=dict( + type='SSDHead', + in_channels=(512, 1024, 512, 256, 256, 256), + num_classes=80, + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), 
+ bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2])), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False), + test_cfg=dict( + nms_pre=1000, + nms=dict(type='nms', iou_threshold=0.45), + min_bbox_size=0, + score_thr=0.02, + max_per_img=200)) +cudnn_benchmark = True diff --git a/configs/mmdet/_base_/schedules/schedule_1x.py b/configs/mmdet/_base_/schedules/schedule_1x.py new file mode 100644 index 00000000..13b3783c --- /dev/null +++ b/configs/mmdet/_base_/schedules/schedule_1x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/configs/mmdet/_base_/schedules/schedule_20e.py b/configs/mmdet/_base_/schedules/schedule_20e.py new file mode 100644 index 00000000..00e85902 --- /dev/null +++ b/configs/mmdet/_base_/schedules/schedule_20e.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/_base_/schedules/schedule_2x.py b/configs/mmdet/_base_/schedules/schedule_2x.py new file mode 100644 index 00000000..69dc9ee8 --- /dev/null +++ b/configs/mmdet/_base_/schedules/schedule_2x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, 
weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/albu_example/README.md b/configs/mmdet/albu_example/README.md new file mode 100644 index 00000000..49edbf3f --- /dev/null +++ b/configs/mmdet/albu_example/README.md @@ -0,0 +1,31 @@ +# Albu Example + +> [Albumentations: fast and flexible image augmentations](https://arxiv.org/abs/1809.06839) + + + +## Abstract + +Data augmentation is a commonly used technique for increasing both the size and the diversity of labeled training sets by leveraging input transformations that preserve output labels. In computer vision domain, image augmentations have become a common implicit regularization technique to combat overfitting in deep convolutional neural networks and are ubiquitously used to improve performance. While most deep learning frameworks implement basic image transformations, the list is typically limited to some variations and combinations of flipping, rotating, scaling, and cropping. Moreover, the image processing speed varies in existing tools for image augmentation. We present Albumentations, a fast and flexible library for image augmentations with many various image transform operations available, that is also an easy-to-use wrapper around other augmentation libraries. We provide examples of image augmentations for different computer vision tasks and show that Albumentations is faster than other commonly used image augmentation tools on most of the commonly used image transformations. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.4 | 16.6 | 38.0 | 34.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208-ab203bcd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208_225520.log.json) | + +## Citation + +```latex +@article{2018arXiv180906839B, + author = {A. Buslaev, A. Parinov, E. Khvedchenya, V.~I. Iglovikov and A.~A. Kalinin}, + title = "{Albumentations: fast and flexible image augmentations}", + journal = {ArXiv e-prints}, + eprint = {1809.06839}, + year = 2018 +} +``` diff --git a/configs/mmdet/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py b/configs/mmdet/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py new file mode 100644 index 00000000..b3f879a6 --- /dev/null +++ b/configs/mmdet/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py @@ -0,0 +1,73 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +albu_train_transforms = [ + dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict( + type='OneOf', + transforms=[ + dict( + type='RGBShift', + r_shift_limit=10, + g_shift_limit=10, + b_shift_limit=10, + p=1.0), + dict( + type='HueSaturationValue', + hue_shift_limit=20, + sat_shift_limit=30, + val_shift_limit=20, + p=1.0) + ], + p=0.1), + dict(type='JpegCompression', 
quality_lower=85, quality_upper=95, p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), +] +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='Pad', size_divisor=32), + dict( + type='Albu', + transforms=albu_train_transforms, + bbox_params=dict( + type='BboxParams', + format='pascal_voc', + label_fields=['gt_labels'], + min_visibility=0.0, + filter_lost_elements=True), + keymap={ + 'img': 'image', + 'gt_masks': 'masks', + 'gt_bboxes': 'bboxes' + }, + update_pad_shape=False, + skip_img_without_anno=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'pad_shape', 'scale_factor')) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/configs/mmdet/atss/README.md b/configs/mmdet/atss/README.md new file mode 100644 index 00000000..1bf69498 --- /dev/null +++ b/configs/mmdet/atss/README.md @@ -0,0 +1,31 @@ +# ATSS + +> [Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection](https://arxiv.org/abs/1912.02424) + + + +## Abstract + +Object detection has been dominated by anchor-based detectors for several years. Recently, anchor-free detectors have become popular due to the proposal of FPN and Focal Loss. In this paper, we first point out that the essential difference between anchor-based and anchor-free detection is actually how to define positive and negative training samples, which leads to the performance gap between them. 
If they adopt the same definition of positive and negative samples during training, there is no obvious difference in the final performance, no matter regressing from a box or a point. This shows that how to select positive and negative training samples is important for current object detectors. Then, we propose an Adaptive Training Sample Selection (ATSS) to automatically select positive and negative samples according to statistical characteristics of objects. It significantly improves the performance of anchor-based and anchor-free detectors and bridges the gap between them. Finally, we discuss the necessity of tiling multiple anchors per location on the image to detect objects. Extensive experiments conducted on MS COCO support our aforementioned analysis and conclusions. With the newly introduced ATSS, we improve state-of-the-art detectors by a large margin to 50.7% AP without introducing any overhead. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.7 | 19.7 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209_102539.log.json) | +| R-101 | pytorch | 1x | 5.6 | 12.3 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.log.json) | + +## Citation + +```latex +@article{zhang2019bridging, + title = {Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection}, + author = {Zhang, Shifeng and Chi, Cheng and Yao, Yongqiang and Lei, Zhen and Li, Stan Z.}, + journal = {arXiv preprint arXiv:1912.02424}, + year = {2019} +} +``` diff --git a/configs/mmdet/atss/atss_r101_fpn_1x_coco.py b/configs/mmdet/atss/atss_r101_fpn_1x_coco.py new file mode 100644 index 00000000..5225d2ab --- /dev/null +++ b/configs/mmdet/atss/atss_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './atss_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/atss/atss_r50_fpn_1x_coco.py b/configs/mmdet/atss/atss_r50_fpn_1x_coco.py new file mode 100644 index 00000000..42ff4c59 --- /dev/null +++ b/configs/mmdet/atss/atss_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + 
'../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='ATSS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='ATSSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/atss/metafile.yml b/configs/mmdet/atss/metafile.yml new file mode 100644 index 00000000..f4c567ef --- /dev/null +++ b/configs/mmdet/atss/metafile.yml @@ -0,0 +1,60 @@ +Collections: + - Name: ATSS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ATSS + - FPN + - ResNet + Paper: + URL: 
https://arxiv.org/abs/1912.02424 + Title: 'Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection' + README: configs/atss/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/atss.py#L6 + Version: v2.0.0 + +Models: + - Name: atss_r50_fpn_1x_coco + In Collection: ATSS + Config: configs/atss/atss_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + inference time (ms/im): + - value: 50.76 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth + + - Name: atss_r101_fpn_1x_coco + In Collection: ATSS + Config: configs/atss/atss_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 81.3 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth diff --git a/configs/mmdet/autoassign/README.md b/configs/mmdet/autoassign/README.md new file mode 100644 index 00000000..8e8341a7 --- /dev/null +++ b/configs/mmdet/autoassign/README.md @@ -0,0 +1,35 @@ +# AutoAssign + +> [AutoAssign: Differentiable Label Assignment for Dense Object Detection](https://arxiv.org/abs/2007.03496) + + + +## Abstract + +Determining positive/negative samples for object detection is known as label assignment. Here we present an anchor-free detector named AutoAssign. It requires little human knowledge and achieves appearance-aware through a fully differentiable weighting mechanism. 
During training, to both satisfy the prior distribution of data and adapt to category characteristics, we present Center Weighting to adjust the category-specific prior distributions. To adapt to object appearances, Confidence Weighting is proposed to adjust the specific assign strategy of each instance. The two weighting modules are then combined to generate positive and negative weights to adjust each location's confidence. Extensive experiments on the MS COCO show that our method steadily surpasses other best sampling strategies by large margins with various backbones. Moreover, our best model achieves 52.1% AP, outperforming all existing one-stage detectors. Besides, experiments on other datasets, e.g., PASCAL VOC, Objects365, and WiderFace, demonstrate the broad applicability of AutoAssign. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:------:|:------:|:--------:| +| R-50 | caffe | 1x | 4.08 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.log.json) | + +**Note**: + +1. We find that the performance is unstable with 1x setting and may fluctuate by about 0.3 mAP. mAP 40.3 ~ 40.6 is acceptable. Such fluctuation can also be found in the original implementation. +2. You can get a more stable results ~ mAP 40.6 with a schedule total 13 epoch, and learning rate is divided by 10 at 10th and 13th epoch. + +## Citation + +```latex +@article{zhu2020autoassign, + title={AutoAssign: Differentiable Label Assignment for Dense Object Detection}, + author={Zhu, Benjin and Wang, Jianfeng and Jiang, Zhengkai and Zong, Fuhang and Liu, Songtao and Li, Zeming and Sun, Jian}, + journal={arXiv preprint arXiv:2007.03496}, + year={2020} +} +``` diff --git a/configs/mmdet/autoassign/autoassign_r50_fpn_8x2_1x_coco.py b/configs/mmdet/autoassign/autoassign_r50_fpn_8x2_1x_coco.py new file mode 100644 index 00000000..db548dc3 --- /dev/null +++ b/configs/mmdet/autoassign/autoassign_r50_fpn_8x2_1x_coco.py @@ -0,0 +1,85 @@ +# We follow the original implementation which +# adopts the Caffe pre-trained backbone. 
+_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='AutoAssign', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + relu_before_extra_convs=True, + init_cfg=dict(type='Caffe2Xavier', layer='Conv2d')), + bbox_head=dict( + type='AutoAssignHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_bbox=dict(type='GIoULoss', loss_weight=5.0)), + train_cfg=None, + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(lr=0.01, paramwise_cfg=dict(norm_decay_mult=0.)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 1000, + step=[8, 11]) +total_epochs = 12 diff --git a/configs/mmdet/autoassign/metafile.yml b/configs/mmdet/autoassign/metafile.yml new file mode 100644 index 00000000..f1e90519 --- /dev/null +++ b/configs/mmdet/autoassign/metafile.yml @@ -0,0 +1,33 @@ +Collections: + - Name: AutoAssign + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - AutoAssign + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/2007.03496 + Title: 'AutoAssign: Differentiable Label Assignment for Dense Object Detection' + README: configs/autoassign/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/detectors/autoassign.py#L6 + Version: v2.12.0 + +Models: + - Name: autoassign_r50_fpn_8x2_1x_coco + In Collection: AutoAssign + Config: configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py + Metadata: + Training Memory (GB): 4.08 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth diff --git a/configs/mmdet/carafe/README.md b/configs/mmdet/carafe/README.md new file mode 100644 index 00000000..983aafb4 --- /dev/null +++ b/configs/mmdet/carafe/README.md @@ -0,0 +1,42 @@ +# CARAFE + +> [CARAFE: Content-Aware ReAssembly of FEatures](https://arxiv.org/abs/1905.02188) + + + +## Abstract + +Feature upsampling is a key operation in a number of modern convolutional network architectures, e.g. feature pyramids. Its design is critical for dense prediction tasks such as object detection and semantic/instance segmentation. 
In this work, we propose Content-Aware ReAssembly of FEatures (CARAFE), a universal, lightweight and highly effective operator to fulfill this goal. CARAFE has several appealing properties: (1) Large field of view. Unlike previous works (e.g. bilinear interpolation) that only exploit sub-pixel neighborhood, CARAFE can aggregate contextual information within a large receptive field. (2) Content-aware handling. Instead of using a fixed kernel for all samples (e.g. deconvolution), CARAFE enables instance-specific content-aware handling, which generates adaptive kernels on-the-fly. (3) Lightweight and fast to compute. CARAFE introduces little computational overhead and can be readily integrated into modern network architectures. We conduct comprehensive evaluations on standard benchmarks in object detection, instance/semantic segmentation and inpainting. CARAFE shows consistent and substantial gains across all the tasks (1.2%, 1.3%, 1.8%, 1.1db respectively) with negligible computational overhead. It has great potential to serve as a strong building block for future research. It has great potential to serve as a strong building block for future research. + +
+ +
+ +## Results and Models + +The results on COCO 2017 val is shown in the below table. + +| Method | Backbone | Style | Lr schd | Test Proposal Num | Inf time (fps) | Box AP | Mask AP | Config | Download | +|:--------------------:|:--------:|:-------:|:-------:|:-----------------:|:--------------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 16.5 | 38.6 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_20200504_175733.log.json) | +| - | - | - | - | 2000 | | | | | +| Mask R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 14.0 | 39.3 | 35.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.393__segm_mAP-0.358_20200503_135957-8687f195.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_20200503_135957.log.json) | +| - | - | - | - | 2000 | | | | | + +## Implementation + +The CUDA implementation of CARAFE can be find at https://github.com/myownskyW7/CARAFE. + +## Citation + +We provide config files to reproduce the object detection & instance segmentation results in the ICCV 2019 Oral paper for [CARAFE: Content-Aware ReAssembly of FEatures](https://arxiv.org/abs/1905.02188). 
+ +```latex +@inproceedings{Wang_2019_ICCV, + title = {CARAFE: Content-Aware ReAssembly of FEatures}, + author = {Wang, Jiaqi and Chen, Kai and Xu, Rui and Liu, Ziwei and Loy, Chen Change and Lin, Dahua}, + booktitle = {The IEEE International Conference on Computer Vision (ICCV)}, + month = {October}, + year = {2019} +} +``` diff --git a/configs/mmdet/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py b/configs/mmdet/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 00000000..dedac3f4 --- /dev/null +++ b/configs/mmdet/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py b/configs/mmdet/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 00000000..668c0239 --- /dev/null +++ b/configs/mmdet/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)), + roi_head=dict( + mask_head=dict( + upsample_cfg=dict( + type='carafe', + scale_factor=2, + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/carafe/metafile.yml 
b/configs/mmdet/carafe/metafile.yml new file mode 100644 index 00000000..b58a3f69 --- /dev/null +++ b/configs/mmdet/carafe/metafile.yml @@ -0,0 +1,55 @@ +Collections: + - Name: CARAFE + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - FPN_CARAFE + - ResNet + - RoIPool + Paper: + URL: https://arxiv.org/abs/1905.02188 + Title: 'CARAFE: Content-Aware ReAssembly of FEatures' + README: configs/carafe/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/necks/fpn_carafe.py#L11 + Version: v2.12.0 + +Models: + - Name: faster_rcnn_r50_fpn_carafe_1x_coco + In Collection: CARAFE + Config: configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py + Metadata: + Training Memory (GB): 4.26 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth + + - Name: mask_rcnn_r50_fpn_carafe_1x_coco + In Collection: CARAFE + Config: configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py + Metadata: + Training Memory (GB): 4.31 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.393__segm_mAP-0.358_20200503_135957-8687f195.pth diff --git a/configs/mmdet/cascade_rcnn/README.md b/configs/mmdet/cascade_rcnn/README.md new file mode 100644 index 00000000..109fd7c3 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/README.md @@ -0,0 +1,79 @@ +# Cascade R-CNN + +> [Cascade R-CNN: High Quality Object Detection and Instance 
Segmentation](https://arxiv.org/abs/1906.09756) + + + +## Abstract + +In object detection, the intersection over union (IoU) threshold is frequently used to define positives/negatives. The threshold used to train a detector defines its quality. While the commonly used threshold of 0.5 leads to noisy (low-quality) detections, detection performance frequently degrades for larger thresholds. This paradox of high-quality detection has two causes: 1) overfitting, due to vanishing positive samples for large thresholds, and 2) inference-time quality mismatch between detector and test hypotheses. A multi-stage object detection architecture, the Cascade R-CNN, composed of a sequence of detectors trained with increasing IoU thresholds, is proposed to address these problems. The detectors are trained sequentially, using the output of a detector as training set for the next. This resampling progressively improves hypotheses quality, guaranteeing a positive training set of equivalent size for all detectors and minimizing overfitting. The same cascade is applied at inference, to eliminate quality mismatches between hypotheses and detectors. An implementation of the Cascade R-CNN without bells or whistles achieves state-of-the-art performance on the COCO dataset, and significantly improves high-quality detection on generic and specific object detection datasets, including VOC, KITTI, CityPerson, and WiderFace. Finally, the Cascade R-CNN is generalized to instance segmentation, with nontrivial improvements over the Mask R-CNN. + +
+ +
+ +## Results and Models + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: |:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.2 | | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_20200504_174853.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316_214748.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_20200504_175131.log.json) | +| R-101-FPN | caffe | 1x | 6.2 | | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_20200504_175649.log.json) | +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317_101744.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_20200504_231812.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 10.9 | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316_055608.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 7.6 | | 43.7 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 10.7 | | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357.log.json)| + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 5.9 | | 41.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_20200504_174659.log.json) | +| R-50-FPN | pytorch | 1x | 6.0 | 11.2 | 41.2 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203_170449.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.9 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_20200504_174711.log.json)| +| R-101-FPN | caffe | 1x | 7.8 | | 43.2 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_20200504_174813.log.json)| +| R-101-FPN | pytorch | 1x | 7.9 | 9.8 | 42.9 | 37.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203_092521.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 43.4 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_20200504_174836.log.json)| +| X-101-32x4d-FPN | pytorch | 1x | 9.2 | 8.6 | 44.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201_052416.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 9.2 | - | 45.0 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 12.2 | 6.7 | 45.3 | 39.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203_044059.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 12.2 | | 45.6 |39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033.log.json)| + +**Notes:** + +- The `20e` schedule in Cascade (Mask) R-CNN indicates decreasing the lr at 16 and 19 epochs, with a total of 20 epochs. + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training for Cascade Mask R-CNN. The users could finetune them for downstream tasks. 
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :----------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 3x | 5.7 | | 44.0 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210707_002651-6e29b3a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210707_002651.log.json) +| R-50-FPN | pytorch| 3x | 5.9 | | 44.3 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco_20210628_164719-5bdc3824.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco_20210628_164719.log.json) +| R-101-FPN | caffe | 3x | 7.7 | | 45.4 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210707_002620-a5bd2389.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210707_002620.log.json) +| R-101-FPN | pytorch| 3x | 7.8 | | 45.5 | 39.6 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco_20210628_165236-51a2d363.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco_20210628_165236.log.json) +| X-101-32x4d-FPN | pytorch| 3x | 9.0 | | 46.3 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210706_225234-40773067.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210706_225234.log.json) +| X-101-32x8d-FPN | pytorch| 3x | 12.1 | | 46.1 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210719_180640-9ff7e76f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210719_180640.log.json) +| X-101-64x4d-FPN | pytorch| 3x | 12.0 | | 46.6 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210719_210311-d3e64ba0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210719_210311.log.json) + +## Citation + +```latex +@article{Cai_2019, + title={Cascade R-CNN: High Quality Object Detection and Instance Segmentation}, + ISSN={1939-3539}, + url={http://dx.doi.org/10.1109/tpami.2019.2956516}, + DOI={10.1109/tpami.2019.2956516}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Cai, Zhaowei and Vasconcelos, Nuno}, + year={2019}, + pages={1–1} +} +``` diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..5ee62310 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..1df87fc6 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py 
b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 00000000..f59c1558 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 00000000..45ab7edf --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..1b20f167 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..12d37efc --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = ['./cascade_mask_rcnn_r50_fpn_1x_coco.py'] + +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], 
to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..9fb817e8 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,49 @@ +_base_ = ['./cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py'] +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', 
flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..49ab539a --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 00000000..1296dc45 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_20e.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..ed0c6d1a --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = [ + 
'../common/mstrain_3x_coco_instance.py', + '../_base_/models/cascade_mask_rcnn_r50_fpn.py' +] diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..06cbbe70 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 00000000..4e352362 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..7d37d17d --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + 
norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..eeec1aa1 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py @@ -0,0 +1,60 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +# ResNeXt-101-32x8d model trained with Caffe2 at FB, +# so the mean and std need to be changed. +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), 
+ ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..7dbef5fa --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 00000000..579b1aca --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..ed6cf4b5 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + 
out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..1e90f4bb --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 00000000..5c077760 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 00000000..b1719c25 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..696bcfb9 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + 
style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..87e21fbf --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 00000000..6f886e1c --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) 
diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..5ac02c10 --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 00000000..486e45ea --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..78229f0d --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,15 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git 
a/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 00000000..58812dec --- /dev/null +++ b/configs/mmdet/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,15 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/cascade_rcnn/metafile.yml b/configs/mmdet/cascade_rcnn/metafile.yml new file mode 100644 index 00000000..1007f2eb --- /dev/null +++ b/configs/mmdet/cascade_rcnn/metafile.yml @@ -0,0 +1,525 @@ +Collections: + - Name: Cascade R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Cascade R-CNN + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: http://dx.doi.org/10.1109/tpami.2019.2956516 + Title: 'Cascade R-CNN: Delving into High Quality Object Detection' + README: configs/cascade_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/cascade_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: cascade_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth + + - Name: cascade_rcnn_r50_fpn_1x_coco + In Collection: Cascade R-CNN + Config: 
configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth + + - Name: cascade_rcnn_r50_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth + + - Name: cascade_rcnn_r101_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth + + - Name: cascade_rcnn_r101_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth + + - Name: cascade_rcnn_r101_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth + + - Name: cascade_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth + + - Name: cascade_rcnn_x101_32x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.6 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth + + - Name: cascade_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 12 + Results: + - Task: Object 
Detection + Dataset: COCO + Metrics: + box AP: 44.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth + + - Name: cascade_rcnn_x101_64x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth + + - Name: cascade_mask_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth + + - Name: cascade_mask_rcnn_r50_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 89.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth + + - Name: cascade_mask_rcnn_r50_fpn_20e_coco + In Collection: Cascade R-CNN + Config: 
configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 89.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth + + - Name: cascade_mask_rcnn_r101_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth + + - Name: cascade_mask_rcnn_r101_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth + + - Name: cascade_mask_rcnn_r101_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py + Metadata: 
+ Training Memory (GB): 7.9 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: 
configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 12.2 + inference time (ms/im): + - value: 149.25 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 12.2 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth + + - Name: cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210707_002651-6e29b3a6.pth + + - Name: cascade_mask_rcnn_r50_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + 
box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco/cascade_mask_rcnn_r50_fpn_mstrain_3x_coco_20210628_164719-5bdc3824.pth + + - Name: cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 7.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210707_002620-a5bd2389.pth + + - Name: cascade_mask_rcnn_r101_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco/cascade_mask_rcnn_r101_fpn_mstrain_3x_coco_20210628_165236-51a2d363.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 9.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210706_225234-40773067.pth + + 
- Name: cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 12.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210719_180640-9ff7e76f.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 12.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco/cascade_mask_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210719_210311-d3e64ba0.pth diff --git a/configs/mmdet/cascade_rpn/README.md b/configs/mmdet/cascade_rpn/README.md new file mode 100644 index 00000000..900dc291 --- /dev/null +++ b/configs/mmdet/cascade_rpn/README.md @@ -0,0 +1,41 @@ +# Cascade RPN + +> [Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution](https://arxiv.org/abs/1909.06720) + + + +## Abstract + +This paper considers an architecture referred to as Cascade Region Proposal Network (Cascade RPN) for improving the region-proposal quality and detection performance by systematically addressing the limitation of the conventional RPN that heuristically defines the anchors and aligns the features to the anchors. 
First, instead of using multiple anchors with predefined scales and aspect ratios, Cascade RPN relies on a single anchor per location and performs multi-stage refinement. Each stage is progressively more stringent in defining positive samples by starting out with an anchor-free metric followed by anchor-based metrics in the ensuing stages. Second, to attain alignment between the features and the anchors throughout the stages, adaptive convolution is proposed that takes the anchors in addition to the image features as its input and learns the sampled features guided by the anchors. A simple implementation of a two-stage Cascade RPN achieves AR 13.4 points higher than that of the conventional RPN, surpassing any existing region proposal methods. When adopting to Fast R-CNN and Faster R-CNN, Cascade RPN can improve the detection mAP by 3.1 and 3.5 points, respectively. + +
+ +
+ +## Results and Models + +### Region proposal performance + +| Method | Backbone | Style | Mem (GB) | Train time (s/iter) | Inf time (fps) | AR 1000 | Config | Download | +|:------:|:--------:|:-----:|:--------:|:-------------------:|:--------------:|:-------:|:-------:|:--------------------------------------:| +| CRPN | R-50-FPN | caffe | - | - | - | 72.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_r50_caffe_fpn_1x_coco/cascade_rpn_r50_caffe_fpn_1x_coco-7aa93cef.pth) | + +### Detection performance + +| Method | Proposal | Backbone | Style | Schedule | Mem (GB) | Train time (s/iter) | Inf time (fps) | box AP | Config | Download | +|:-------------:|:-----------:|:--------:|:-------:|:--------:|:--------:|:-------------------:|:--------------:|:------:|:-------:|:--------------------------------------------:| +| Fast R-CNN | Cascade RPN | R-50-FPN | caffe | 1x | - | - | - | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco/crpn_fast_rcnn_r50_caffe_fpn_1x_coco-cb486e66.pth) | +| Faster R-CNN | Cascade RPN | R-50-FPN | caffe | 1x | - | - | - | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco/crpn_faster_rcnn_r50_caffe_fpn_1x_coco-c8283cca.pth) | + +## Citation + +We provide the code for reproducing experiment results of [Cascade RPN](https://arxiv.org/abs/1909.06720). 
+ +```latex +@inproceedings{vu2019cascade, + title={Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution}, + author={Vu, Thang and Jang, Hyunjun and Pham, Trung X and Yoo, Chang D}, + booktitle={Conference on Neural Information Processing Systems (NeurIPS)}, + year={2019} +} +``` diff --git a/configs/mmdet/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..29f5d074 --- /dev/null +++ b/configs/mmdet/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,77 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict( + bbox_head=dict( + bbox_coder=dict(target_stds=[0.04, 0.04, 0.08, 0.08]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.5), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict( + pos_iou_thr=0.65, neg_iou_thr=0.65, min_pos_iou=0.65), + sampler=dict(num=256))), + test_cfg=dict(rcnn=dict(score_thr=1e-3))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 
'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..bad86e6d --- /dev/null +++ b/configs/mmdet/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,92 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +rpn_weight = 0.7 +model = dict( + rpn_head=dict( + _delete_=True, + type='CascadeRPNHead', + num_stages=2, + stages=[ + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=True, + sampling=False, + with_cls=False, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.1, 0.1, 0.5, 0.5)), + 
loss_bbox=dict( + type='IoULoss', linear=True, + loss_weight=10.0 * rpn_weight)), + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + adapt_cfg=dict(type='offset'), + bridged_feature=False, + sampling=True, + with_cls=True, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.05, 0.05, 0.1, 0.1)), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0 * rpn_weight), + loss_bbox=dict( + type='IoULoss', linear=True, + loss_weight=10.0 * rpn_weight)) + ]), + roi_head=dict( + bbox_head=dict( + bbox_coder=dict(target_stds=[0.04, 0.04, 0.08, 0.08]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.5), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=[ + dict( + assigner=dict( + type='RegionAssigner', center_ratio=0.2, ignore_ratio=0.5), + allowed_border=-1, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False) + ], + rpn_proposal=dict(max_per_img=300, nms=dict(iou_threshold=0.8)), + rcnn=dict( + assigner=dict( + pos_iou_thr=0.65, neg_iou_thr=0.65, min_pos_iou=0.65), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(max_per_img=300, nms=dict(iou_threshold=0.8)), + rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..5562e696 --- /dev/null +++ b/configs/mmdet/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,77 @@ 
+_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='CascadeRPNHead', + num_stages=2, + stages=[ + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=True, + sampling=False, + with_cls=False, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.1, 0.1, 0.5, 0.5)), + loss_bbox=dict(type='IoULoss', linear=True, loss_weight=10.0)), + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + adapt_cfg=dict(type='offset'), + bridged_feature=False, + sampling=True, + with_cls=True, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.05, 0.05, 0.1, 0.1)), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', linear=True, loss_weight=10.0)) + ]), + train_cfg=dict(rpn=[ + dict( + assigner=dict( + type='RegionAssigner', center_ratio=0.2, ignore_ratio=0.5), + allowed_border=-1, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.3, + ignore_iof_thr=-1, + iou_calculator=dict(type='BboxOverlaps2D')), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.8), + min_bbox_size=0))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/cascade_rpn/metafile.yml b/configs/mmdet/cascade_rpn/metafile.yml new file mode 100644 index 00000000..335b2bc7 --- 
/dev/null +++ b/configs/mmdet/cascade_rpn/metafile.yml @@ -0,0 +1,44 @@ +Collections: + - Name: Cascade RPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Cascade RPN + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1909.06720 + Title: 'Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution' + README: configs/cascade_rpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.8.0/mmdet/models/dense_heads/cascade_rpn_head.py#L538 + Version: v2.8.0 + +Models: + - Name: crpn_fast_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade RPN + Config: configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco/crpn_fast_rcnn_r50_caffe_fpn_1x_coco-cb486e66.pth + + - Name: crpn_faster_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade RPN + Config: configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco/crpn_faster_rcnn_r50_caffe_fpn_1x_coco-c8283cca.pth diff --git a/configs/mmdet/centernet/README.md b/configs/mmdet/centernet/README.md new file mode 100644 index 00000000..ffc1d8c2 --- /dev/null +++ b/configs/mmdet/centernet/README.md @@ -0,0 +1,40 @@ +# CenterNet + +> [Objects as Points](https://arxiv.org/abs/1904.07850) + + + +## Abstract + +Detection identifies objects as axis-aligned boxes in an image. Most successful object detectors enumerate a nearly exhaustive list of potential object locations and classify each. 
This is wasteful, inefficient, and requires additional post-processing. In this paper, we take a different approach. We model an object as a single point --- the center point of its bounding box. Our detector uses keypoint estimation to find center points and regresses to all other object properties, such as size, 3D location, orientation, and even pose. Our center point based approach, CenterNet, is end-to-end differentiable, simpler, faster, and more accurate than corresponding bounding box based detectors. CenterNet achieves the best speed-accuracy trade-off on the MS COCO dataset, with 28.1% AP at 142 FPS, 37.4% AP at 52 FPS, and 45.1% AP with multi-scale testing at 1.4 FPS. We use the same approach to estimate 3D bounding box in the KITTI benchmark and human pose on the COCO keypoint dataset. Our method performs competitively with sophisticated multi-stage methods and runs in real-time. + +
+ +
+ +## Results and Models + +| Backbone | DCN | Mem (GB) | Box AP | Flip box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :----: | +| ResNet-18 | N | 3.45 | 25.9 | 27.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centernet/centernet_resnet18_140e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210705_093630-bb5b3bf7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210705_093630.log.json) | +| ResNet-18 | Y | 3.47 | 29.5 | 30.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210702_155131-c8cd631f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210702_155131.log.json) | + +Note: + +- Flip box AP setting is single-scale and `flip=True`. +- Due to complex data augmentation, we find that the performance is unstable and may fluctuate by about 0.4 mAP. mAP 29.4 ~ 29.8 is acceptable in ResNet-18-DCNv2. +- Compared to the source code, we refer to [CenterNet-Better](https://github.com/FateScript/CenterNet-better), and make the following changes: + - Fix wrong image mean and variance in image normalization to be compatible with the pre-trained backbone. + - Use SGD rather than ADAM optimizer and add warmup and grad clip. + - Use DistributedDataParallel as other models in MMDetection rather than using DataParallel.
+ +## Citation + +```latex +@article{zhou2019objects, + title={Objects as Points}, + author={Zhou, Xingyi and Wang, Dequan and Kr{\"a}henb{\"u}hl, Philipp}, + journal={arXiv preprint arXiv:1904.07850}, + year={2019} +} +``` diff --git a/configs/mmdet/centernet/centernet_resnet18_140e_coco.py b/configs/mmdet/centernet/centernet_resnet18_140e_coco.py new file mode 100644 index 00000000..52c86a5e --- /dev/null +++ b/configs/mmdet/centernet/centernet_resnet18_140e_coco.py @@ -0,0 +1,3 @@ +_base_ = './centernet_resnet18_dcnv2_140e_coco.py' + +model = dict(neck=dict(use_dcn=False)) diff --git a/configs/mmdet/centernet/centernet_resnet18_dcnv2_140e_coco.py b/configs/mmdet/centernet/centernet_resnet18_dcnv2_140e_coco.py new file mode 100644 index 00000000..b8a0bb10 --- /dev/null +++ b/configs/mmdet/centernet/centernet_resnet18_dcnv2_140e_coco.py @@ -0,0 +1,127 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='CenterNet', + backbone=dict( + type='ResNet', + depth=18, + norm_eval=False, + norm_cfg=dict(type='BN'), + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')), + neck=dict( + type='CTResNetNeck', + in_channel=512, + num_deconv_filters=(256, 128, 64), + num_deconv_kernels=(4, 4, 4), + use_dcn=True), + bbox_head=dict( + type='CenterNetHead', + num_classes=80, + in_channel=64, + feat_channel=64, + loss_center_heatmap=dict(type='GaussianFocalLoss', loss_weight=1.0), + loss_wh=dict(type='L1Loss', loss_weight=0.1), + loss_offset=dict(type='L1Loss', loss_weight=1.0)), + train_cfg=None, + test_cfg=dict(topk=100, local_maximum_kernel=3, max_per_img=100)) + +# We fixed the incorrect img_norm_cfg problem in the source code.
+img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True, color_type='color'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(512, 512), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + mean=[0, 0, 0], + std=[1, 1, 1], + to_rgb=True, + test_pad_mode=None), + dict(type='Resize', img_scale=(512, 512), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict( + type='RandomCenterCropPad', + ratios=None, + border=None, + mean=[0, 0, 0], + std=[1, 1, 1], + to_rgb=True, + test_mode=True, + test_pad_mode=['logical_or', 31], + test_pad_add_pix=1), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + meta_keys=('filename', 'ori_filename', 'ori_shape', + 'img_shape', 'pad_shape', 'scale_factor', 'flip', + 'flip_direction', 'img_norm_cfg', 'border'), + keys=['img']) + ]) +] + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=16, + workers_per_gpu=4, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +# 
Based on the default settings of modern detectors, the SGD effect is better +# than the Adam in the source code, so we use SGD default settings and +# if you use adam+lr5e-4, the map is 29.1. +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) + +# learning policy +# Based on the default settings of modern detectors, we added warmup settings. +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 1000, + step=[18, 24]) # the real step is [18*5, 24*5] +runner = dict(max_epochs=28) # the real epoch is 28*5=140 + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (16 samples per GPU) +auto_scale_lr = dict(base_batch_size=128) diff --git a/configs/mmdet/centernet/metafile.yml b/configs/mmdet/centernet/metafile.yml new file mode 100644 index 00000000..e86e57b5 --- /dev/null +++ b/configs/mmdet/centernet/metafile.yml @@ -0,0 +1,46 @@ +Collections: + - Name: CenterNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x TITANXP GPUs + Architecture: + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.07850 + Title: 'Objects as Points' + README: configs/centernet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.13.0/mmdet/models/detectors/centernet.py#L10 + Version: v2.13.0 + +Models: + - Name: centernet_resnet18_dcnv2_140e_coco + In Collection: CenterNet + Config: configs/centernet/centernet_resnet18_dcnv2_140e_coco.py + Metadata: + Batch Size: 128 + Training Memory (GB): 3.47 + Epochs: 140 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 29.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210702_155131-c8cd631f.pth + + - Name: centernet_resnet18_140e_coco + In Collection: CenterNet + Config: 
configs/centernet/centernet_resnet18_140e_coco.py + Metadata: + Batch Size: 128 + Training Memory (GB): 3.45 + Epochs: 140 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 25.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210705_093630-bb5b3bf7.pth diff --git a/configs/mmdet/centripetalnet/README.md b/configs/mmdet/centripetalnet/README.md new file mode 100644 index 00000000..1a5a346b --- /dev/null +++ b/configs/mmdet/centripetalnet/README.md @@ -0,0 +1,36 @@ +# CentripetalNet + +> [CentripetalNet: Pursuing High-quality Keypoint Pairs for Object Detection](https://arxiv.org/abs/2003.09119) + + + +## Abstract + +Keypoint-based detectors have achieved pretty-well performance. However, incorrect keypoint matching is still widespread and greatly affects the performance of the detector. In this paper, we propose CentripetalNet which uses centripetal shift to pair corner keypoints from the same instance. CentripetalNet predicts the position and the centripetal shift of the corner points and matches corners whose shifted results are aligned. Combining position information, our approach matches corner points more accurately than the conventional embedding approaches do. Corner pooling extracts information inside the bounding boxes onto the border. To make this information more aware at the corners, we design a cross-star deformable convolution network to conduct feature adaption. Furthermore, we explore instance segmentation on anchor-free detectors by equipping our CentripetalNet with a mask prediction module. On MS-COCO test-dev, our CentripetalNet not only outperforms all existing anchor-free detectors with an AP of 48.0% but also achieves comparable performance to the state-of-the-art instance segmentation approaches with a 40.2% MaskAP. + +
+ +
+ +## Results and Models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [16 x 6](./centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | 190/210 | 16.7 | 3.7 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804.log.json) | + +Note: + +- TTA setting is single-scale and `flip=True`. +- The model we released is the best checkpoint rather than the latest checkpoint (box AP 44.8 vs 44.6 in our experiment). 
+ +## Citation + +```latex +@InProceedings{Dong_2020_CVPR, +author = {Dong, Zhiwei and Li, Guoxuan and Liao, Yue and Wang, Fei and Ren, Pengju and Qian, Chen}, +title = {CentripetalNet: Pursuing High-Quality Keypoint Pairs for Object Detection}, +booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, +month = {June}, +year = {2020} +} +``` diff --git a/configs/mmdet/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py b/configs/mmdet/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py new file mode 100644 index 00000000..5281c5bf --- /dev/null +++ b/configs/mmdet/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py @@ -0,0 +1,110 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CentripetalHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=0, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1), + loss_guiding_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=0.05), + loss_centripetal_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + 
dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[190]) +runner = dict(type='EpochBasedRunner', max_epochs=210) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (16 GPUs) x (6 samples per GPU) +auto_scale_lr = dict(base_batch_size=96) diff --git a/configs/mmdet/centripetalnet/metafile.yml b/configs/mmdet/centripetalnet/metafile.yml new file mode 100644 index 00000000..61aed3e5 --- /dev/null +++ b/configs/mmdet/centripetalnet/metafile.yml @@ -0,0 +1,39 @@ +Collections: + - Name: CentripetalNet + Metadata: + Training Data: COCO + Training Techniques: + - Adam + Training Resources: 16x V100 GPUs + Architecture: + - Corner Pooling + - Stacked Hourglass Network + Paper: + URL: https://arxiv.org/abs/2003.09119 + Title: 'CentripetalNet: Pursuing High-quality Keypoint Pairs for Object Detection' + README: configs/centripetalnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.5.0/mmdet/models/detectors/cornernet.py#L9 + Version: v2.5.0 + +Models: + - Name: centripetalnet_hourglass104_mstest_16x6_210e_coco + In Collection: CentripetalNet + Config: configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py + Metadata: + Batch Size: 96 + Training Memory (GB): 16.7 + inference time (ms/im): + - value: 270.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth diff --git a/configs/mmdet/cityscapes/README.md b/configs/mmdet/cityscapes/README.md new file mode 100644 index 00000000..7522ffe4 --- /dev/null +++ b/configs/mmdet/cityscapes/README.md @@ -0,0 +1,46 @@ +# Cityscapes + +> [The Cityscapes Dataset for Semantic Urban Scene Understanding](https://arxiv.org/abs/1604.01685) + + + +## Abstract + +Visual understanding of complex urban street scenes is an enabling factor for a wide range of applications. 
Object detection has benefited enormously from large-scale datasets, especially in the context of deep learning. For semantic urban scene understanding, however, no current dataset adequately captures the complexity of real-world urban scenes. +To address this, we introduce Cityscapes, a benchmark suite and large-scale dataset to train and test approaches for pixel-level and instance-level semantic labeling. Cityscapes is comprised of a large, diverse set of stereo video sequences recorded in streets from 50 different cities. 5000 of these images have high quality pixel-level annotations; 20000 additional images have coarse annotations to enable methods that leverage large volumes of weakly-labeled data. Crucially, our effort exceeds previous attempts in terms of dataset size, annotation richness, scene variability, and complexity. Our accompanying empirical study provides an in-depth analysis of the dataset characteristics, as well as a performance evaluation of several state-of-the-art approaches based on our benchmark. + +
+ +
+ +## Common settings + +- All baselines were trained using 8 GPUs with a batch size of 8 (1 image per GPU) using the [linear scaling rule](https://arxiv.org/abs/1706.02677) to scale the learning rate. +- All models were trained on `cityscapes_train`, and tested on `cityscapes_val`. +- 1x training schedule indicates 64 epochs which corresponds to slightly less than the 24k iterations reported in the original schedule from the [Mask R-CNN paper](https://arxiv.org/abs/1703.06870) +- COCO pre-trained weights are used to initialize. +- A conversion [script](../../tools/dataset_converters/cityscapes.py) is provided to convert Cityscapes into COCO format. Please refer to [install.md](../../docs/1_exist_data_model.md#prepare-datasets) for details. +- `CityscapesDataset` implements three evaluation methods. `bbox` and `segm` are standard COCO bbox/mask AP. `cityscapes` is the cityscapes dataset official evaluation, which may be slightly higher than COCO. + +### Faster R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :---: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.2 | - | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502-829424c0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502_114915.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------: | :------------: | :----: | :-----: | :------: | :------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.3 | - | 40.9 | 36.4 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20201211_133733-d2858245.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20201211_133733.log.json) | + +## Citation + +```latex +@inproceedings{Cordts2016Cityscapes, + title={The Cityscapes Dataset for Semantic Urban Scene Understanding}, + author={Cordts, Marius and Omran, Mohamed and Ramos, Sebastian and Rehfeld, Timo and Enzweiler, Markus and Benenson, Rodrigo and Franke, Uwe and Roth, Stefan and Schiele, Bernt}, + booktitle={Proc. of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2016} +} +``` diff --git a/configs/mmdet/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py b/configs/mmdet/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 00000000..ca636bda --- /dev/null +++ b/configs/mmdet/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,44 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_detection.py', + '../_base_/default_runtime.py' +] +model = dict( + backbone=dict(init_cfg=None), + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + 
policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +runner = dict( + type='EpochBasedRunner', max_epochs=8) # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' # noqa + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (1 samples per GPU) +auto_scale_lr = dict(base_batch_size=8) diff --git a/configs/mmdet/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py b/configs/mmdet/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 00000000..83ea058d --- /dev/null +++ b/configs/mmdet/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,51 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict(init_cfg=None), + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=8, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + 
warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +runner = dict( + type='EpochBasedRunner', max_epochs=8) # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth' # noqa + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (1 samples per GPU) +auto_scale_lr = dict(base_batch_size=8) diff --git a/configs/mmdet/common/lsj_100e_coco_instance.py b/configs/mmdet/common/lsj_100e_coco_instance.py new file mode 100644 index 00000000..cacf23d7 --- /dev/null +++ b/configs/mmdet/common/lsj_100e_coco_instance.py @@ -0,0 +1,90 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +image_size = (1024, 1024) + +file_client_args = dict(backend='disk') +# comment out the code below to use different file client +# file_client_args = dict( +# backend='petrel', +# path_mapping=dict({ +# './data/': 's3://openmmlab/datasets/detection/', +# 'data/': 's3://openmmlab/datasets/detection/' +# })) + +train_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=image_size, + ratio_range=(0.1, 2.0), + multiscale_mode='range', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=image_size, + recompute_bbox=True, + allow_negative_crop=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1e-2, 1e-2)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=image_size), # padding to 
image_size leads 0.5+ mAP + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=4, # simply change this from 2 to 16 for 50e - 400e training. + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=5, metric=['bbox', 'segm']) + +# optimizer assumes bs=64 +optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.00004) +optimizer_config = dict(grad_clip=None) + +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.067, + step=[22, 24]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/configs/mmdet/common/mstrain-poly_3x_coco_instance.py b/configs/mmdet/common/mstrain-poly_3x_coco_instance.py new file mode 100644 index 00000000..c22ed945 --- /dev/null +++ b/configs/mmdet/common/mstrain-poly_3x_coco_instance.py @@ -0,0 +1,80 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' 
+img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['bbox', 'segm']) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + 
policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/configs/mmdet/common/mstrain_3x_coco.py b/configs/mmdet/common/mstrain_3x_coco.py new file mode 100644 index 00000000..80ec8b8d --- /dev/null +++ b/configs/mmdet/common/mstrain_3x_coco.py @@ -0,0 +1,76 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, 
+ ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/configs/mmdet/common/mstrain_3x_coco_instance.py b/configs/mmdet/common/mstrain_3x_coco_instance.py new file mode 100644 index 00000000..50f39bef --- /dev/null +++ b/configs/mmdet/common/mstrain_3x_coco_instance.py @@ -0,0 +1,76 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training 
+data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['bbox', 'segm']) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/configs/mmdet/common/ssj_270k_coco_instance.py b/configs/mmdet/common/ssj_270k_coco_instance.py new file mode 100644 index 00000000..851098f8 --- /dev/null +++ b/configs/mmdet/common/ssj_270k_coco_instance.py @@ -0,0 +1,91 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +image_size = (1024, 1024) + +file_client_args = dict(backend='disk') + +# Standard Scale Jittering (SSJ) resizes and crops an image +# with a resize range of 0.8 to 1.25 of the original image size. 
+train_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=image_size, + ratio_range=(0.8, 1.25), + multiscale_mode='range', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=image_size, + recompute_bbox=True, + allow_negative_crop=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1e-2, 1e-2)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=image_size), # padding to image_size leads 0.5+ mAP + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) + +evaluation = dict(interval=6000, metric=['bbox', 'segm']) + +# optimizer assumes batch_size = (32 GPUs) x (2 samples per GPU) +optimizer = dict(type='SGD', lr=0.1, momentum=0.9, weight_decay=0.00004) +optimizer_config = dict(grad_clip=None) + +# lr steps at [0.9, 0.95, 0.975] of the maximum 
iterations +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.001, + step=[243000, 256500, 263250]) +checkpoint_config = dict(interval=6000) +# The model is trained by 270k iterations with batch_size 64, +# which is roughly equivalent to 144 epochs. +runner = dict(type='IterBasedRunner', max_iters=270000) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (32 GPUs) x (2 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/common/ssj_scp_270k_coco_instance.py b/configs/mmdet/common/ssj_scp_270k_coco_instance.py new file mode 100644 index 00000000..540839ff --- /dev/null +++ b/configs/mmdet/common/ssj_scp_270k_coco_instance.py @@ -0,0 +1,97 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +image_size = (1024, 1024) + +file_client_args = dict(backend='disk') + +# Standard Scale Jittering (SSJ) resizes and crops an image +# with a resize range of 0.8 to 1.25 of the original image size. 
+load_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=image_size, + ratio_range=(0.8, 1.25), + multiscale_mode='range', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=image_size, + recompute_bbox=True, + allow_negative_crop=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(1e-2, 1e-2)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Pad', size=image_size), +] +train_pipeline = [ + dict(type='CopyPaste', max_num_pasted=100), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', file_client_args=file_client_args), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='MultiImageMixDataset', + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=load_pipeline), + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) + +evaluation = dict(interval=6000, metric=['bbox', 'segm']) + +# optimizer assumes batch_size = (32 GPUs) x (2 samples per GPU) +optimizer = dict(type='SGD', lr=0.1, momentum=0.9, 
weight_decay=0.00004) +optimizer_config = dict(grad_clip=None) + +# lr steps at [0.9, 0.95, 0.975] of the maximum iterations +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.001, + step=[243000, 256500, 263250]) +checkpoint_config = dict(interval=6000) +# The model is trained by 270k iterations with batch_size 64, +# which is roughly equivalent to 144 epochs. +runner = dict(type='IterBasedRunner', max_iters=270000) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (32 GPUs) x (2 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/cornernet/README.md b/configs/mmdet/cornernet/README.md new file mode 100644 index 00000000..55877c4c --- /dev/null +++ b/configs/mmdet/cornernet/README.md @@ -0,0 +1,43 @@ +# CornerNet + +> [Cornernet: Detecting objects as paired keypoints](https://arxiv.org/abs/1808.01244) + + + +## Abstract + +We propose CornerNet, a new approach to object detection where we detect an object bounding box as a pair of keypoints, the top-left corner and the bottom-right corner, using a single convolution neural network. By detecting objects as paired keypoints, we eliminate the need for designing a set of anchor boxes commonly used in prior single-stage detectors. In addition to our novel formulation, we introduce corner pooling, a new type of pooling layer that helps the network better localize corners. Experiments show that CornerNet achieves a 42.2% AP on MS COCO, outperforming all existing one-stage detectors. + +
+ +
+ +## Results and Models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [10 x 5](./cornernet_hourglass104_mstest_10x5_210e_coco.py) | 180/210 | 13.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720.log.json) | +| HourglassNet-104 | [8 x 6](./cornernet_hourglass104_mstest_8x6_210e_coco.py) | 180/210 | 15.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618.log.json) | +| HourglassNet-104 | [32 x 3](./cornernet_hourglass104_mstest_32x3_210e_coco.py) | 180/210 | 9.5 | 3.9 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110.log.json) | + +Note: + +- TTA setting is single-scale and `flip=True`. +- Experiments with `images_per_gpu=6` are conducted on Tesla V100-SXM2-32GB, `images_per_gpu=3` are conducted on GeForce GTX 1080 Ti. +- Here are the descriptions of each experiment setting: + - 10 x 5: 10 GPUs with 5 images per gpu. This is the same setting as that reported in the original paper. + - 8 x 6: 8 GPUs with 6 images per gpu. The total batchsize is similar to paper and only need 1 node to train. + - 32 x 3: 32 GPUs with 3 images per gpu. The default setting for 1080TI and need 4 nodes to train. + +## Citation + +```latex +@inproceedings{law2018cornernet, + title={Cornernet: Detecting objects as paired keypoints}, + author={Law, Hei and Deng, Jia}, + booktitle={15th European Conference on Computer Vision, ECCV 2018}, + pages={765--781}, + year={2018}, + organization={Springer Verlag} +} +``` diff --git a/configs/mmdet/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py b/configs/mmdet/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py new file mode 100644 index 00000000..6cb05a78 --- /dev/null +++ b/configs/mmdet/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py @@ -0,0 +1,110 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + 
type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=5, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = 
dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (10 GPUs) x (5 samples per GPU) +auto_scale_lr = dict(base_batch_size=50) diff --git a/configs/mmdet/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py b/configs/mmdet/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py new file mode 100644 index 00000000..f539cdb8 --- /dev/null +++ b/configs/mmdet/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py @@ -0,0 +1,110 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', 
with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=3, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (32 GPUs) x (3 samples per GPU) +auto_scale_lr = dict(base_batch_size=96) diff --git a/configs/mmdet/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py b/configs/mmdet/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py new file mode 100644 index 00000000..9b115d78 --- /dev/null +++ b/configs/mmdet/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py @@ -0,0 +1,110 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), 
keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (6 samples per GPU) +auto_scale_lr = dict(base_batch_size=48) diff --git a/configs/mmdet/cornernet/metafile.yml b/configs/mmdet/cornernet/metafile.yml new file mode 100644 index 00000000..c2f6143a --- /dev/null +++ b/configs/mmdet/cornernet/metafile.yml @@ -0,0 +1,83 @@ +Collections: + - Name: CornerNet + Metadata: + Training Data: COCO + Training Techniques: + - Adam + Training Resources: 8x V100 GPUs + Architecture: + - Corner Pooling + - Stacked Hourglass Network + Paper: + URL: https://arxiv.org/abs/1808.01244 + Title: 'CornerNet: Detecting Objects as Paired Keypoints' + README: configs/cornernet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.3.0/mmdet/models/detectors/cornernet.py#L9 + Version: v2.3.0 + +Models: + - Name: cornernet_hourglass104_mstest_10x5_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py + Metadata: + Training Resources: 10x V100 GPUs + Batch Size: 50 + Training Memory (GB): 13.9 + inference time (ms/im): + - value: 238.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth + + - Name: cornernet_hourglass104_mstest_8x6_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py + Metadata: + Batch Size: 48 + Training Memory (GB): 15.9 + inference time (ms/im): + - value: 238.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth + + - Name: cornernet_hourglass104_mstest_32x3_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py + Metadata: + Training Resources: 32x V100 GPUs + Batch Size: 96 + Training Memory (GB): 9.5 + inference time (ms/im): + - value: 256.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth diff --git a/configs/mmdet/dcn/README.md b/configs/mmdet/dcn/README.md new file mode 100644 index 00000000..7866078a --- /dev/null +++ b/configs/mmdet/dcn/README.md @@ -0,0 +1,48 @@ +# DCN + +> [Deformable Convolutional Networks](https://arxiv.org/abs/1703.06211) + + + +## Abstract + +Convolutional neural networks (CNNs) are inherently limited to model geometric transformations due to the fixed geometric structures in its building modules. In this work, we introduce two new modules to enhance the transformation modeling capacity of CNNs, namely, deformable convolution and deformable RoI pooling. Both are based on the idea of augmenting the spatial sampling locations in the modules with additional offsets and learning the offsets from target tasks, without additional supervision. The new modules can readily replace their plain counterparts in existing CNNs and can be easily trained end-to-end by standard back-propagation, giving rise to deformable convolutional networks. Extensive experiments validate the effectiveness of our approach on sophisticated vision tasks of object detection and semantic segmentation. + +
+ +
+ +## Results and Models + +| Backbone | Model | Style | Conv | Pool | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:----------------:|:------------:|:-------:|:-------------:|:------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 4.0 | 17.8 | 41.3 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_212941.log.json) | +| R-50-FPN | Faster | pytorch | - | dpool | 1x | 5.0 | 17.2 | 38.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307_203250.log.json) | +| R-101-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 12.5 | 42.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_230019.log.json) | +| X-101-32x4d-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 7.3 | 10.0 | 44.5 | | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203_001325.log.json) | +| R-50-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 15.4 | 41.8 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203_061339.log.json) | +| R-101-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 6.5 | 11.7 | 43.5 | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216_191601.log.json) | +| R-50-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 14.6 | 43.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_220843.log.json) | +| R-101-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 6.4 | 11.0 | 45.0 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_224829.log.json) | +| R-50-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 10.0 | 44.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202_010309.log.json) | +| R-101-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 8.0 | 8.6 | 45.8 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204_134006.log.json) | +| X-101-32x4d-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 9.2 | | 47.3 | 41.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-20200606_183737.log.json) | +| R-50-FPN (FP16) | Mask | pytorch | dconv(c3-c5) | - | 1x | 3.0 | | 41.9 | 37.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247-c06429d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247.log.json) | + +**Notes:** + +- `dconv` denotes deformable convolution, `c3-c5` means adding dconv in resnet stage 3 to 5. `dpool` denotes deformable roi pooling. +- The dcn ops are modified from https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch, which should be more memory efficient and slightly faster. +- (*) For R-50-FPN (dg=4), dg is short for deformable_group. This model is trained and tested on Amazon EC2 p3dn.24xlarge instance. 
+- **Memory, Train/Inf time is outdated.** + +## Citation + +```latex +@inproceedings{dai2017deformable, + title={Deformable Convolutional Networks}, + author={Dai, Jifeng and Qi, Haozhi and Xiong, Yuwen and Li, Yi and Zhang, Guodong and Hu, Han and Wei, Yichen}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} +``` diff --git a/configs/mmdet/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..081b998f --- /dev/null +++ b/configs/mmdet/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..3b3683af --- /dev/null +++ b/configs/mmdet/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..daaa4729 --- /dev/null +++ b/configs/mmdet/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py 
b/configs/mmdet/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..a01df33c --- /dev/null +++ b/configs/mmdet/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..aa664bd6 --- /dev/null +++ b/configs/mmdet/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..f5fee7e1 --- /dev/null +++ b/configs/mmdet/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..8787088f --- /dev/null +++ b/configs/mmdet/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py b/configs/mmdet/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py new 
file mode 100644 index 00000000..1b695f0e --- /dev/null +++ b/configs/mmdet/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='DeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/configs/mmdet/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..e3bea195 --- /dev/null +++ b/configs/mmdet/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..cb340022 --- /dev/null +++ b/configs/mmdet/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..ababe58d --- /dev/null +++ b/configs/mmdet/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = 
'../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py b/configs/mmdet/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..ee5cca7d --- /dev/null +++ b/configs/mmdet/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) + +fp16 = dict(loss_scale=512.) diff --git a/configs/mmdet/dcn/metafile.yml b/configs/mmdet/dcn/metafile.yml new file mode 100644 index 00000000..36f38871 --- /dev/null +++ b/configs/mmdet/dcn/metafile.yml @@ -0,0 +1,272 @@ +Collections: + - Name: Deformable Convolutional Networks + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + Paper: + URL: https://arxiv.org/abs/1703.06211 + Title: "Deformable Convolutional Networks" + README: configs/dcn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/dcn/deform_conv.py#L15 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 56.18 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth + + - Name: 
faster_rcnn_r50_fpn_dpool_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + inference time (ms/im): + - value: 58.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth + + - Name: faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 80 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth + + - Name: faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth + + - Name: mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + 
inference time (ms/im): + - value: 64.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth + + - Name: mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py + Metadata: + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + Training Memory (GB): 3.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247-c06429d2.pth + + - Name: mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth + + - Name: cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + 
inference time (ms/im): + - value: 68.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth + + - Name: cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth + + - Name: cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth + + - Name: cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch 
+ batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth diff --git a/configs/mmdet/dcnv2/README.md b/configs/mmdet/dcnv2/README.md new file mode 100644 index 00000000..1e7e3201 --- /dev/null +++ b/configs/mmdet/dcnv2/README.md @@ -0,0 +1,37 @@ +# DCNv2 + +> [Deformable ConvNets v2: More Deformable, Better Results](https://arxiv.org/abs/1811.11168) + + + +## Abstract + +The superior performance of Deformable Convolutional Networks arises from its ability to adapt to the geometric variations of objects. Through an examination of its adaptive behavior, we observe that while the spatial support for its neural features conforms more closely than regular ConvNets to object structure, this support may nevertheless extend well beyond the region of interest, causing features to be influenced by irrelevant image content. To address this problem, we present a reformulation of Deformable ConvNets that improves its ability to focus on pertinent image regions, through increased modeling power and stronger training. 
The modeling power is enhanced through a more comprehensive integration of deformable convolution within the network, and by introducing a modulation mechanism that expands the scope of deformation modeling. To effectively harness this enriched modeling capability, we guide network training via a proposed feature mimicking scheme that helps the network to learn features that reflect the object focus and classification power of RCNN features. With the proposed contributions, this new version of Deformable ConvNets yields significant performance gains over the original model and produces leading results on the COCO benchmark for object detection and instance segmentation. + +## Results and Models + +| Backbone | Model | Style | Conv | Pool | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:----------------:|:------------:|:-------:|:-------------:|:------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.1 | 17.6 | 41.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130_222144.log.json) | +| *R-50-FPN (dg=4) | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.2 | 17.4 | 41.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130_222058.log.json) | +| R-50-FPN | Faster | pytorch | - | mdpool | 1x | 5.8 | 16.6 | 38.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcnv2/faster_rcnn_r50_fpn_mdpool_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307_203304.log.json) | +| R-50-FPN | Mask | pytorch | mdconv(c3-c5) | - | 1x | 4.5 | 15.1 | 41.5 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcnv2/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203_063443.log.json) | +| R-50-FPN (FP16) | Mask | pytorch | mdconv(c3-c5)| - | 1x | 3.1 | | 42.0 | 37.6 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434-cf8fefa5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434.log.json) | + +**Notes:** + +- `mdconv` denotes modulated deformable convolution, `c3-c5` means adding dconv in resnet stage 3 to 5. `mdpool` denotes modulated deformable roi pooling. 
+- The dcn ops are modified from https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch, which should be more memory efficient and slightly faster. +- (*) For R-50-FPN (dg=4), dg is short for deformable_group. This model is trained and tested on Amazon EC2 p3dn.24xlarge instance. +- **Memory, Train/Inf time is outdated.** + +## Citation + +```latex +@article{zhu2018deformable, + title={Deformable ConvNets v2: More Deformable, Better Results}, + author={Zhu, Xizhou and Hu, Han and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1811.11168}, + year={2018} +} +``` diff --git a/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..d1bcf3c1 --- /dev/null +++ b/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py b/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py new file mode 100644 index 00000000..d0ab89c2 --- /dev/null +++ b/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=4, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdpool_1x_coco.py b/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdpool_1x_coco.py new file mode 100644 index 00000000..ad7b0346 --- /dev/null +++ b/configs/mmdet/dcnv2/faster_rcnn_r50_fpn_mdpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + 
type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='ModulatedDeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/configs/mmdet/dcnv2/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py b/configs/mmdet/dcnv2/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..7e21454b --- /dev/null +++ b/configs/mmdet/dcnv2/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) + +fp16 = dict(loss_scale=512.) diff --git a/configs/mmdet/dcnv2/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/configs/mmdet/dcnv2/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..5ca2a67c --- /dev/null +++ b/configs/mmdet/dcnv2/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/dcnv2/metafile.yml b/configs/mmdet/dcnv2/metafile.yml new file mode 100644 index 00000000..90494215 --- /dev/null +++ b/configs/mmdet/dcnv2/metafile.yml @@ -0,0 +1,123 @@ +Collections: + - Name: Deformable Convolutional Networks v2 + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + Paper: + URL: https://arxiv.org/abs/1811.11168 + Title: "Deformable ConvNets v2: More Deformable, Better Results" + README: configs/dcnv2/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/dcn/deform_conv.py#L15 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco + In Collection: 
Deformable Convolutional Networks v2 + Config: configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.1 + inference time (ms/im): + - value: 56.82 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth + + - Name: faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco + In Collection: Deformable Convolutional Networks v2 + Config: configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 57.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth + + - Name: faster_rcnn_r50_fpn_mdpool_1x_coco + In Collection: Deformable Convolutional Networks v2 + Config: configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py + Metadata: + Training Memory (GB): 5.8 + inference time (ms/im): + - value: 60.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth + + - Name: mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks v2 + Config: configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - 
value: 66.23 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth + + - Name: mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks v2 + Config: configs/dcn/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 3.1 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434-cf8fefa5.pth diff --git a/configs/mmdet/deepfashion/README.md b/configs/mmdet/deepfashion/README.md new file mode 100644 index 00000000..dd4f012b --- /dev/null +++ b/configs/mmdet/deepfashion/README.md @@ -0,0 +1,70 @@ +# DeepFashion + +> [DeepFashion: Powering Robust Clothes Recognition and Retrieval With Rich Annotations](https://openaccess.thecvf.com/content_cvpr_2016/html/Liu_DeepFashion_Powering_Robust_CVPR_2016_paper.html) + + + +## Abstract + +Recent advances in clothes recognition have been driven by the construction of clothes datasets. Existing datasets are limited in the amount of annotations and are difficult to cope with the various challenges in real-world applications. In this work, we introduce DeepFashion, a large-scale clothes dataset with comprehensive annotations. 
It contains over 800,000 images, which are richly annotated with massive attributes, clothing landmarks, and correspondence of images taken under different scenarios including store, street snapshot, and consumer. Such rich annotations enable the development of powerful algorithms in clothes recognition and facilitating future researches. To demonstrate the advantages of DeepFashion, we propose a new deep model, namely FashionNet, which learns clothing features by jointly predicting clothing attributes and landmarks. The estimated landmarks are then employed to pool or gate the learned features. It is optimized in an iterative manner. Extensive experiments demonstrate the effectiveness of FashionNet and the usefulness of DeepFashion. + +
+ +
+ +## Introduction + +[MMFashion](https://github.com/open-mmlab/mmfashion) develops "fashion parsing and segmentation" module +based on the dataset +[DeepFashion-Inshop](https://drive.google.com/drive/folders/0B7EVK8r0v71pVDZFQXRsMDZCX1E?usp=sharing). +Its annotation follows COCO style. +To use it, you need to first download the data. Note that we only use "img_highres" in this task. +The file tree should be like this: + +```sh +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── DeepFashion +│ │ ├── In-shop +│ │ ├── Anno +│ │ │   ├── segmentation +│ │ │   | ├── DeepFashion_segmentation_train.json +│ │ │   | ├── DeepFashion_segmentation_query.json +│ │ │   | ├── DeepFashion_segmentation_gallery.json +│ │ │   ├── list_bbox_inshop.txt +│ │ │   ├── list_description_inshop.json +│ │ │   ├── list_item_inshop.txt +│ │ │   └── list_landmarks_inshop.txt +│ │ ├── Eval +│ │ │ └── list_eval_partition.txt +│ │ ├── Img +│ │ │ ├── img +│ │ │ │ ├──XXX.jpg +│ │ │ ├── img_highres +│ │ │ └── ├──XXX.jpg + +``` + +After that you can train the Mask RCNN r50 on DeepFashion-In-shop dataset by launching training with the `mask_rcnn_r50_fpn_1x.py` config +or creating your own config file. 
+ +## Results and Models + +| Backbone | Model type | Dataset | bbox detection Average Precision | segmentation Average Precision | Config | Download (Google) | +| :---------: | :----------: | :-----------------: | :--------------------------------: | :----------------------------: | :---------:| :-------------------------: | +| ResNet50 | Mask RCNN | DeepFashion-In-shop | 0.599 | 0.584 |[config](https://github.com/open-mmlab/mmdetection/blob/master/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion/mask_rcnn_r50_fpn_15e_deepfashion_20200329_192752.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion/20200329_192752.log.json) | + +## Citation + +```latex +@inproceedings{liuLQWTcvpr16DeepFashion, + author = {Liu, Ziwei and Luo, Ping and Qiu, Shi and Wang, Xiaogang and Tang, Xiaoou}, + title = {DeepFashion: Powering Robust Clothes Recognition and Retrieval with Rich Annotations}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + month = {June}, + year = {2016} +} +``` diff --git a/configs/mmdet/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py b/configs/mmdet/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py new file mode 100644 index 00000000..c4e86387 --- /dev/null +++ b/configs/mmdet/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py @@ -0,0 +1,10 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/deepfashion.py', '../_base_/schedules/schedule_1x.py', + '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=15), mask_head=dict(num_classes=15))) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=15) diff --git a/configs/mmdet/deformable_detr/README.md b/configs/mmdet/deformable_detr/README.md new file mode 100644 index 00000000..f415be35 --- /dev/null +++ 
b/configs/mmdet/deformable_detr/README.md @@ -0,0 +1,41 @@ +# Deformable DETR + +> [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159) + + + +## Abstract + +DETR has been recently proposed to eliminate the need for many hand-designed components in object detection while demonstrating good performance. However, it suffers from slow convergence and limited feature spatial resolution, due to the limitation of Transformer attention modules in processing image feature maps. To mitigate these issues, we proposed Deformable DETR, whose attention modules only attend to a small set of key sampling points around a reference. Deformable DETR can achieve better performance than DETR (especially on small objects) with 10 times less training epochs. Extensive experiments on the COCO benchmark demonstrate the effectiveness of our approach. + +
+ +
+ +## Results and Models + +| Backbone | Model | Lr schd | box AP | Config | Download | +|:------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | Deformable DETR |50e | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.log.json) | +| R-50 | + iterative bounding box refinement |50e | 46.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.log.json) | +| R-50 | ++ two-stage Deformable DETR |50e | 46.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.log.json) | + +# NOTE + +1. All models are trained with batch size 32. +2. The performance is unstable. 
`Deformable DETR` and `iterative bounding box refinement` may fluctuate about 0.3 mAP. `two-stage Deformable DETR` may fluctuate about 0.2 mAP. + +## Citation + +We provide the config files for Deformable DETR: [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159). + +```latex +@inproceedings{ +zhu2021deformable, +title={Deformable DETR: Deformable Transformers for End-to-End Object Detection}, +author={Xizhou Zhu and Weijie Su and Lewei Lu and Bin Li and Xiaogang Wang and Jifeng Dai}, +booktitle={International Conference on Learning Representations}, +year={2021}, +url={https://openreview.net/forum?id=gZ9hCDWe6ke} +} +``` diff --git a/configs/mmdet/deformable_detr/deformable_detr_r50_16x2_50e_coco.py b/configs/mmdet/deformable_detr/deformable_detr_r50_16x2_50e_coco.py new file mode 100644 index 00000000..c64d09fe --- /dev/null +++ b/configs/mmdet/deformable_detr/deformable_detr_r50_16x2_50e_coco.py @@ -0,0 +1,177 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DeformableDETR', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='ChannelMapper', + in_channels=[512, 1024, 2048], + kernel_size=1, + out_channels=256, + act_cfg=None, + norm_cfg=dict(type='GN', num_groups=32), + num_outs=4), + bbox_head=dict( + type='DeformableDETRHead', + num_query=300, + num_classes=80, + in_channels=2048, + sync_cls_avg_factor=True, + as_two_stage=False, + transformer=dict( + type='DeformableDetrTransformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', embed_dims=256), + feedforward_channels=1024, + 
ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DeformableDetrTransformerDecoder', + num_layers=6, + return_intermediate=True, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + dict( + type='MultiScaleDeformableAttention', + embed_dims=256) + ], + feedforward_channels=1024, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')))), + positional_encoding=dict( + type='SinePositionalEncoding', + num_feats=128, + normalize=True, + offset=-0.5), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', weight=5.0, box_format='xywh'), + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=2.0))), + test_cfg=dict(max_per_img=100)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. 
+train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[ + [ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + # The radio of all image in train dataset < 7 + # follow the original impl + img_scale=[(400, 4200), (500, 4200), (600, 4200)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +# test_pipeline, NOTE the Pad's size_divisor is different from the default +# setting (size_divisor=32). While there is little effect on the performance +# whether we use the default setting or use size_divisor=1. 
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(filter_empty_gt=False, pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='AdamW', + lr=2e-4, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={ + 'backbone': dict(lr_mult=0.1), + 'sampling_offsets': dict(lr_mult=0.1), + 'reference_points': dict(lr_mult=0.1) + })) +optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[40]) +runner = dict(type='EpochBasedRunner', max_epochs=50) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (16 GPUs) x (2 samples per GPU) +auto_scale_lr = dict(base_batch_size=32) diff --git a/configs/mmdet/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py b/configs/mmdet/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py new file mode 100644 index 00000000..01f13df4 --- /dev/null +++ b/configs/mmdet/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py @@ -0,0 +1,2 @@ +_base_ = 'deformable_detr_r50_16x2_50e_coco.py' +model = dict(bbox_head=dict(with_box_refine=True)) diff --git a/configs/mmdet/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py b/configs/mmdet/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py new file mode 100644 index 00000000..2aa840d9 --- /dev/null +++ b/configs/mmdet/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py @@ -0,0 +1,2 @@ +_base_ = 'deformable_detr_refine_r50_16x2_50e_coco.py' +model = dict(bbox_head=dict(as_two_stage=True)) diff --git a/configs/mmdet/deformable_detr/metafile.yml b/configs/mmdet/deformable_detr/metafile.yml new file mode 100644 index 00000000..873292db --- /dev/null +++ b/configs/mmdet/deformable_detr/metafile.yml @@ -0,0 +1,56 @@ +Collections: + - Name: Deformable DETR + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Multi Scale Train + - Gradient Clip + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - Transformer + Paper: + URL: https://openreview.net/forum?id=gZ9hCDWe6ke + Title: 'Deformable DETR: Deformable Transformers for End-to-End Object Detection' + README: configs/deformable_detr/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.12.0/mmdet/models/detectors/deformable_detr.py#L6 + Version: v2.12.0 + +Models: + - Name: deformable_detr_r50_16x2_50e_coco + In Collection: Deformable DETR + Config: configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py + Metadata: + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 
44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth + + - Name: deformable_detr_refine_r50_16x2_50e_coco + In Collection: Deformable DETR + Config: configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py + Metadata: + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.pth + + - Name: deformable_detr_twostage_refine_r50_16x2_50e_coco + In Collection: Deformable DETR + Config: configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py + Metadata: + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.pth diff --git a/configs/mmdet/detectors/README.md b/configs/mmdet/detectors/README.md new file mode 100644 index 00000000..3504ee27 --- /dev/null +++ b/configs/mmdet/detectors/README.md @@ -0,0 +1,69 @@ +# DetectoRS + +> [DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution](https://arxiv.org/abs/2006.02334) + + + +## Abstract + +Many modern object detectors demonstrate outstanding performances by using the mechanism of looking and thinking twice. In this paper, we explore this mechanism in the backbone design for object detection. At the macro level, we propose Recursive Feature Pyramid, which incorporates extra feedback connections from Feature Pyramid Networks into the bottom-up backbone layers. 
At the micro level, we propose Switchable Atrous Convolution, which convolves the features with different atrous rates and gathers the results using switch functions. Combining them results in DetectoRS, which significantly improves the performances of object detection. On COCO test-dev, DetectoRS achieves state-of-the-art 55.7% box AP for object detection, 48.5% mask AP for instance segmentation, and 50.0% PQ for panoptic segmentation. + +
+ +
+ +## Introduction + +DetectoRS requires COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) dataset for training. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +| | ├── stuffthingmaps +``` + +## Results and Models + +DetectoRS includes two major components: + +- Recursive Feature Pyramid (RFP). +- Switchable Atrous Convolution (SAC). + +They can be used independently. +Combining them together results in DetectoRS. +The results on COCO 2017 val are shown in the below table. + +| Method | Detector | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| RFP | Cascade + ResNet-50 | 1x | 7.5 | - | 44.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco_20200624_104126.log.json) | +| SAC | Cascade + ResNet-50 | 1x | 5.6 | - | 45.0| | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco_20200624_104402.log.json) | +| DetectoRS | Cascade + ResNet-50 | 1x | 9.9 | - | 47.4 | | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco_20200706_001203.log.json) | +| RFP | HTC + ResNet-50 | 1x | 11.2 | - | 46.6 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_rfp_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco_20200624_103053.log.json) | +| SAC | HTC + ResNet-50 | 1x | 9.3 | - | 46.4 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_sac_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco_20200624_103111.log.json) | +| DetectoRS | HTC + ResNet-50 | 1x | 13.6 | - | 49.1 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r50_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco_20200624_103659.log.json) | +| DetectoRS | HTC + ResNet-101 | 20e | 19.6 | | 50.5 | 43.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r101_20e_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r101_20e_coco/detectors_htc_r101_20e_coco_20210419_203638-348d533b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r101_20e_coco/detectors_htc_r101_20e_coco_20210419_203638.log.json) | + +*Note*: This is a re-implementation based on MMDetection-V2. +The original implementation is based on MMDetection-V1. + +## Citation + +We provide the config files for [DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution](https://arxiv.org/pdf/2006.02334.pdf). + +```latex +@article{qiao2020detectors, + title={DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution}, + author={Qiao, Siyuan and Chen, Liang-Chieh and Yuille, Alan}, + journal={arXiv preprint arXiv:2006.02334}, + year={2020} +} +``` diff --git a/configs/mmdet/detectors/cascade_rcnn_r50_rfp_1x_coco.py b/configs/mmdet/detectors/cascade_rcnn_r50_rfp_1x_coco.py new file mode 100644 index 00000000..4430d8a6 --- /dev/null +++ b/configs/mmdet/detectors/cascade_rcnn_r50_rfp_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/configs/mmdet/detectors/cascade_rcnn_r50_sac_1x_coco.py b/configs/mmdet/detectors/cascade_rcnn_r50_sac_1x_coco.py new file mode 100644 index 
00000000..ccd9319b --- /dev/null +++ b/configs/mmdet/detectors/cascade_rcnn_r50_sac_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/configs/mmdet/detectors/detectors_cascade_rcnn_r50_1x_coco.py b/configs/mmdet/detectors/detectors_cascade_rcnn_r50_1x_coco.py new file mode 100644 index 00000000..f7604043 --- /dev/null +++ b/configs/mmdet/detectors/detectors_cascade_rcnn_r50_1x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/configs/mmdet/detectors/detectors_htc_r101_20e_coco.py b/configs/mmdet/detectors/detectors_htc_r101_20e_coco.py new file mode 100644 index 00000000..93d7d2b1 --- /dev/null +++ b/configs/mmdet/detectors/detectors_htc_r101_20e_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r101_fpn_20e_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + 
conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet101', + style='pytorch'))) diff --git a/configs/mmdet/detectors/detectors_htc_r50_1x_coco.py b/configs/mmdet/detectors/detectors_htc_r50_1x_coco.py new file mode 100644 index 00000000..0d2fc4f7 --- /dev/null +++ b/configs/mmdet/detectors/detectors_htc_r50_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/configs/mmdet/detectors/htc_r50_rfp_1x_coco.py b/configs/mmdet/detectors/htc_r50_rfp_1x_coco.py new file mode 100644 index 00000000..496104e1 --- /dev/null +++ b/configs/mmdet/detectors/htc_r50_rfp_1x_coco.py @@ -0,0 +1,24 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), 
+ neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/configs/mmdet/detectors/htc_r50_sac_1x_coco.py b/configs/mmdet/detectors/htc_r50_sac_1x_coco.py new file mode 100644 index 00000000..72d4db96 --- /dev/null +++ b/configs/mmdet/detectors/htc_r50_sac_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/configs/mmdet/detectors/metafile.yml b/configs/mmdet/detectors/metafile.yml new file mode 100644 index 00000000..4bed5694 --- /dev/null +++ b/configs/mmdet/detectors/metafile.yml @@ -0,0 +1,114 @@ +Collections: + - Name: DetectoRS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ASPP + - FPN + - RFP + - RPN + - ResNet + - RoIAlign + - SAC + Paper: + URL: https://arxiv.org/abs/2006.02334 + Title: 'DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution' + README: configs/detectors/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/backbones/detectors_resnet.py#L205 + Version: v2.2.0 + +Models: + - Name: cascade_rcnn_r50_rfp_1x_coco + In Collection: DetectoRS + Config: configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py + Metadata: + Training Memory (GB): 7.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth + + - Name: cascade_rcnn_r50_sac_1x_coco + In Collection: DetectoRS + Config: configs/detectors/cascade_rcnn_r50_sac_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth + + - Name: detectors_cascade_rcnn_r50_1x_coco + In Collection: DetectoRS + Config: configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py + Metadata: + Training Memory (GB): 9.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth + + - Name: htc_r50_rfp_1x_coco + In Collection: DetectoRS + Config: configs/detectors/htc_r50_rfp_1x_coco.py + Metadata: + Training Memory (GB): 11.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth + + - Name: htc_r50_sac_1x_coco + In Collection: DetectoRS + Config: configs/detectors/htc_r50_sac_1x_coco.py + Metadata: + Training Memory (GB): 9.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth + + - Name: detectors_htc_r50_1x_coco + In Collection: DetectoRS + Config: configs/detectors/detectors_htc_r50_1x_coco.py + Metadata: + Training Memory (GB): 13.6 
+ Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth diff --git a/configs/mmdet/detr/README.md b/configs/mmdet/detr/README.md new file mode 100644 index 00000000..5f25357a --- /dev/null +++ b/configs/mmdet/detr/README.md @@ -0,0 +1,37 @@ +# DETR + +> [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872) + + + +## Abstract + +We present a new method that views object detection as a direct set prediction problem. Our approach streamlines the detection pipeline, effectively removing the need for many hand-designed components like a non-maximum suppression procedure or anchor generation that explicitly encode our prior knowledge about the task. The main ingredients of the new framework, called DEtection TRansformer or DETR, are a set-based global loss that forces unique predictions via bipartite matching, and a transformer encoder-decoder architecture. Given a fixed small set of learned object queries, DETR reasons about the relations of the objects and the global image context to directly output the final set of predictions in parallel. The new model is conceptually simple and does not require a specialized library, unlike many other modern detectors. DETR demonstrates accuracy and run-time performance on par with the well-established and highly-optimized Faster RCNN baseline on the challenging COCO object detection dataset. Moreover, DETR can be easily generalized to produce panoptic segmentation in a unified manner. We show that it significantly outperforms competitive baselines. + +
+ +
+ +## Results and Models + +| Backbone | Model | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | DETR |150e |7.9| | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detr/detr_r50_8x2_150e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835.log.json) | + +## Citation + +We provide the config files for DETR: [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872). + +```latex +@inproceedings{detr, + author = {Nicolas Carion and + Francisco Massa and + Gabriel Synnaeve and + Nicolas Usunier and + Alexander Kirillov and + Sergey Zagoruyko}, + title = {End-to-End Object Detection with Transformers}, + booktitle = {ECCV}, + year = {2020} +} +``` diff --git a/configs/mmdet/detr/detr_r50_8x2_150e_coco.py b/configs/mmdet/detr/detr_r50_8x2_150e_coco.py new file mode 100644 index 00000000..892447de --- /dev/null +++ b/configs/mmdet/detr/detr_r50_8x2_150e_coco.py @@ -0,0 +1,150 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DETR', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(3, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + bbox_head=dict( + type='DETRHead', + num_classes=80, + in_channels=2048, + transformer=dict( + type='Transformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1) + ], + 
feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=6, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')), + )), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + loss_cls=dict( + type='CrossEntropyLoss', + bg_cls_weight=0.1, + use_sigmoid=False, + loss_weight=1.0, + class_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=1.), + reg_cost=dict(type='BBoxL1Cost', weight=5.0, box_format='xywh'), + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=2.0))), + test_cfg=dict(max_per_img=100)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. 
+train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +# test_pipeline, NOTE the Pad's size_divisor is different from the default +# setting (size_divisor=32). While there is little effect on the performance +# whether we use the default setting or use size_divisor=1. 
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)})) +optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[100]) +runner = dict(type='EpochBasedRunner', max_epochs=150) diff --git a/configs/mmdet/detr/metafile.yml b/configs/mmdet/detr/metafile.yml new file mode 100644 index 00000000..45622cf9 --- /dev/null +++ b/configs/mmdet/detr/metafile.yml @@ -0,0 +1,33 @@ +Collections: + - Name: DETR + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Multi Scale Train + - Gradient Clip + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - Transformer + Paper: + URL: https://arxiv.org/abs/2005.12872 + Title: 'End-to-End Object Detection with Transformers' + README: configs/detr/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.7.0/mmdet/models/detectors/detr.py#L7 + Version: v2.7.0 + +Models: + - Name: detr_r50_8x2_150e_coco + In Collection: DETR + Config: configs/detr/detr_r50_8x2_150e_coco.py + Metadata: + Training Memory (GB): 7.9 + Epochs: 150 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth diff --git 
a/configs/mmdet/double_heads/README.md b/configs/mmdet/double_heads/README.md new file mode 100644 index 00000000..c7507e86 --- /dev/null +++ b/configs/mmdet/double_heads/README.md @@ -0,0 +1,32 @@ +# Double Heads + +> [Rethinking Classification and Localization for Object Detection](https://arxiv.org/abs/1904.06493) + + + +## Abstract + +Two head structures (i.e. fully connected head and convolution head) have been widely used in R-CNN based detectors for classification and localization tasks. However, there is a lack of understanding of how does these two head structures work for these two tasks. To address this issue, we perform a thorough analysis and find an interesting fact that the two head structures have opposite preferences towards the two tasks. Specifically, the fully connected head (fc-head) is more suitable for the classification task, while the convolution head (conv-head) is more suitable for the localization task. Furthermore, we examine the output feature maps of both heads and find that fc-head has more spatial sensitivity than conv-head. Thus, fc-head has more capability to distinguish a complete object from part of an object, but is not robust to regress the whole object. Based upon these findings, we propose a Double-Head method, which has a fully connected head focusing on classification and a convolution head for bounding box regression. Without bells and whistles, our method gains +3.5 and +2.8 AP on MS COCO dataset from Feature Pyramid Network (FPN) baselines with ResNet-50 and ResNet-101 backbones, respectively. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 6.8 | 9.5 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130_220238.log.json) | + +## Citation + +```latex +@article{wu2019rethinking, + title={Rethinking Classification and Localization for Object Detection}, + author={Yue Wu and Yinpeng Chen and Lu Yuan and Zicheng Liu and Lijuan Wang and Hongzhi Li and Yun Fu}, + year={2019}, + eprint={1904.06493}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` diff --git a/configs/mmdet/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..9b8118b4 --- /dev/null +++ b/configs/mmdet/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DoubleHeadRoIHead', + reg_roi_scale_factor=1.3, + bbox_head=dict( + _delete_=True, + type='DoubleConvFCBBoxHead', + num_convs=4, + num_fcs=2, + in_channels=256, + conv_out_channels=1024, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0)))) diff --git 
a/configs/mmdet/double_heads/metafile.yml b/configs/mmdet/double_heads/metafile.yml new file mode 100644 index 00000000..6fe9b7af --- /dev/null +++ b/configs/mmdet/double_heads/metafile.yml @@ -0,0 +1,41 @@ +Collections: + - Name: Rethinking Classification and Localization for Object Detection + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/pdf/1904.06493 + Title: 'Rethinking Classification and Localization for Object Detection' + README: configs/double_heads/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/roi_heads/double_roi_head.py#L6 + Version: v2.0.0 + +Models: + - Name: dh_faster_rcnn_r50_fpn_1x_coco + In Collection: Rethinking Classification and Localization for Object Detection + Config: configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.8 + inference time (ms/im): + - value: 105.26 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth diff --git a/configs/mmdet/dyhead/README.md b/configs/mmdet/dyhead/README.md new file mode 100644 index 00000000..068a35b1 --- /dev/null +++ b/configs/mmdet/dyhead/README.md @@ -0,0 +1,46 @@ +# DyHead + +> [Dynamic Head: Unifying Object Detection Heads with Attentions](https://arxiv.org/abs/2106.08322) + + + +## Abstract + +The complex nature of combining localization and classification in object detection has resulted in the flourished development of methods. Previous works tried to improve the performance in various object detection heads but failed to present a unified view. 
In this paper, we present a novel dynamic head framework to unify object detection heads with attentions. By coherently combining multiple self-attention mechanisms between feature levels for scale-awareness, among spatial locations for spatial-awareness, and within output channels for task-awareness, the proposed approach significantly improves the representation ability of object detection heads without any computational overhead. Further experiments demonstrate that the effectiveness and efficiency of the proposed dynamic head on the COCO benchmark. With a standard ResNeXt-101-DCN backbone, we largely improve the performance over popular object detectors and achieve a new state-of-the-art at 54.0 AP. Furthermore, with latest transformer backbone and extra data, we can push current best COCO result to a new record at 60.6 AP. + +
+ +
+ +## Results and Models + +| Method | Backbone | Style | Setting | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------:|:--------:|:-------:|:------------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| ATSS | R-50 | caffe | reproduction | 1x | 5.4 | 13.2 | 42.5 | [config](./atss_r50_caffe_fpn_dyhead_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dyhead/atss_r50_fpn_dyhead_for_reproduction_1x_coco/atss_r50_fpn_dyhead_for_reproduction_4x4_1x_coco_20220107_213939-162888e6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dyhead/atss_r50_fpn_dyhead_for_reproduction_1x_coco/atss_r50_fpn_dyhead_for_reproduction_4x4_1x_coco_20220107_213939.log.json) | +| ATSS | R-50 | pytorch | simple | 1x | 4.9 | 13.7 | 43.3 | [config](./atss_r50_fpn_dyhead_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dyhead/atss_r50_fpn_dyhead_4x4_1x_coco/atss_r50_fpn_dyhead_4x4_1x_coco_20211219_023314-eaa620c6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dyhead/atss_r50_fpn_dyhead_4x4_1x_coco/atss_r50_fpn_dyhead_4x4_1x_coco_20211219_023314.log.json) | + +- We trained the above models with 4 GPUs and 4 `samples_per_gpu`. +- The `reproduction` setting aims to reproduce the official implementation based on Detectron2. +- The `simple` setting serves as a minimum example to use DyHead in MMDetection. Specifically, + - it adds `DyHead` to `neck` after `FPN` + - it sets `stacked_convs=0` in `bbox_head` +- The `simple` setting achieves higher AP than the original implementation. + We have not conducted an ablation study between the two settings. + `dict(type='Pad', size_divisor=128)` may further improve AP by preferring spatial alignment across pyramid levels, although large padding reduces efficiency. 
+ +## Relation to Other Methods + +- DyHead can be regarded as an improved [SEPC](https://arxiv.org/abs/2005.03101) with [DyReLU modules](https://arxiv.org/abs/2003.10027) and simplified [SE blocks](https://arxiv.org/abs/1709.01507). +- Xiyang Dai et al., the author team of DyHead, adopt it for [Dynamic DETR](https://openaccess.thecvf.com/content/ICCV2021/html/Dai_Dynamic_DETR_End-to-End_Object_Detection_With_Dynamic_Attention_ICCV_2021_paper.html). + The description of Dynamic Encoder in Sec. 3.2 will help you understand DyHead. + +## Citation + +```latex +@inproceedings{DyHead_CVPR2021, + author = {Dai, Xiyang and Chen, Yinpeng and Xiao, Bin and Chen, Dongdong and Liu, Mengchen and Yuan, Lu and Zhang, Lei}, + title = {Dynamic Head: Unifying Object Detection Heads With Attentions}, + booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2021} +} +``` diff --git a/configs/mmdet/dyhead/atss_r50_caffe_fpn_dyhead_1x_coco.py b/configs/mmdet/dyhead/atss_r50_caffe_fpn_dyhead_1x_coco.py new file mode 100644 index 00000000..223b6532 --- /dev/null +++ b/configs/mmdet/dyhead/atss_r50_caffe_fpn_dyhead_1x_coco.py @@ -0,0 +1,112 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='ATSS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + dict( + type='DyHead', + in_channels=256, + out_channels=256, + num_blocks=6, + # disable zero_init_offset to follow official implementation + zero_init_offset=False) + ], + bbox_head=dict( + 
type='ATSSHead', + num_classes=80, + in_channels=256, + pred_kernel_size=1, # follow DyHead official implementation + stacked_convs=0, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128], + center_offset=0.5), # follow DyHead official implementation + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) + +# use caffe img_norm, size_divisor=128, pillow resize +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(1333, 800), + keep_ratio=True, + backend='pillow'), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=128), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True, backend='pillow'), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=128), + 
dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/dyhead/atss_r50_fpn_dyhead_1x_coco.py b/configs/mmdet/dyhead/atss_r50_fpn_dyhead_1x_coco.py new file mode 100644 index 00000000..8c5109d0 --- /dev/null +++ b/configs/mmdet/dyhead/atss_r50_fpn_dyhead_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='ATSS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + dict(type='DyHead', in_channels=256, out_channels=256, num_blocks=6) + ], + bbox_head=dict( + type='ATSSHead', + num_classes=80, + in_channels=256, + stacked_convs=0, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', 
iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/dyhead/metafile.yml b/configs/mmdet/dyhead/metafile.yml new file mode 100644 index 00000000..a2e9504e --- /dev/null +++ b/configs/mmdet/dyhead/metafile.yml @@ -0,0 +1,63 @@ +Collections: + - Name: DyHead + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x T4 GPUs + Architecture: + - ATSS + - DyHead + - FPN + - ResNet + - Deformable Convolution + - Pyramid Convolution + Paper: + URL: https://arxiv.org/abs/2106.08322 + Title: 'Dynamic Head: Unifying Object Detection Heads with Attentions' + README: configs/dyhead/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.22.0/mmdet/models/necks/dyhead.py#L130 + Version: v2.22.0 + +Models: + - Name: atss_r50_caffe_fpn_dyhead_1x_coco + In Collection: DyHead + Config: configs/dyhead/atss_r50_caffe_fpn_dyhead_1x_coco.py + Metadata: + Training Memory (GB): 5.4 + inference time (ms/im): + - value: 75.7 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dyhead/atss_r50_fpn_dyhead_for_reproduction_1x_coco/atss_r50_fpn_dyhead_for_reproduction_4x4_1x_coco_20220107_213939-162888e6.pth + + - Name: atss_r50_fpn_dyhead_1x_coco + In Collection: DyHead + Config: configs/dyhead/atss_r50_fpn_dyhead_1x_coco.py + Metadata: + Training Memory (GB): 4.9 + inference time (ms/im): + - value: 73.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/dyhead/atss_r50_fpn_dyhead_4x4_1x_coco/atss_r50_fpn_dyhead_4x4_1x_coco_20211219_023314-eaa620c6.pth diff --git a/configs/mmdet/dynamic_rcnn/README.md b/configs/mmdet/dynamic_rcnn/README.md new file mode 100644 index 00000000..a22138f5 --- /dev/null +++ b/configs/mmdet/dynamic_rcnn/README.md @@ -0,0 +1,30 @@ +# Dynamic R-CNN + +> [Dynamic R-CNN: Towards High Quality Object Detection via Dynamic Training](https://arxiv.org/abs/2004.06002) + + + +## Abstract + +Although two-stage object detectors have continuously advanced the state-of-the-art performance in recent years, the training process itself is far from crystal. In this work, we first point out the inconsistency problem between the fixed network settings and the dynamic training procedure, which greatly affects the performance. For example, the fixed label assignment strategy and regression loss function cannot fit the distribution change of proposals and thus are harmful to training high quality detectors. Consequently, we propose Dynamic R-CNN to adjust the label assignment criteria (IoU threshold) and the shape of regression loss function (parameters of SmoothL1 Loss) automatically based on the statistics of proposals during training. This dynamic design makes better use of the training samples and pushes the detector to fit more high quality samples. Specifically, our method improves upon ResNet-50-FPN baseline with 1.9% AP and 5.5% AP90 on the MS COCO dataset with no extra overhead. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.8 | | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x_20200618_095048.log.json) | + +## Citation + +```latex +@article{DynamicRCNN, + author = {Hongkai Zhang and Hong Chang and Bingpeng Ma and Naiyan Wang and Xilin Chen}, + title = {Dynamic {R-CNN}: Towards High Quality Object Detection via Dynamic Training}, + journal = {arXiv preprint arXiv:2004.06002}, + year = {2020} +} +``` diff --git a/configs/mmdet/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..f2deb99e --- /dev/null +++ b/configs/mmdet/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DynamicRoIHead', + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict(nms=dict(iou_threshold=0.85)), + rcnn=dict( + dynamic_rcnn=dict( + iou_topk=75, + beta_topk=10, + update_iter_interval=100, + initial_iou=0.4, + initial_beta=1.0))), + 
test_cfg=dict(rpn=dict(nms=dict(iou_threshold=0.85)))) diff --git a/configs/mmdet/dynamic_rcnn/metafile.yml b/configs/mmdet/dynamic_rcnn/metafile.yml new file mode 100644 index 00000000..fec43db4 --- /dev/null +++ b/configs/mmdet/dynamic_rcnn/metafile.yml @@ -0,0 +1,35 @@ +Collections: + - Name: Dynamic R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Dynamic R-CNN + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/pdf/2004.06002 + Title: 'Dynamic R-CNN: Towards High Quality Object Detection via Dynamic Training' + README: configs/dynamic_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/roi_heads/dynamic_roi_head.py#L11 + Version: v2.2.0 + +Models: + - Name: dynamic_rcnn_r50_fpn_1x_coco + In Collection: Dynamic R-CNN + Config: configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth diff --git a/configs/mmdet/efficientnet/README.md b/configs/mmdet/efficientnet/README.md new file mode 100644 index 00000000..44f6df29 --- /dev/null +++ b/configs/mmdet/efficientnet/README.md @@ -0,0 +1,30 @@ +# EfficientNet + +> [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](https://arxiv.org/abs/1905.11946v5) + + + +## Introduction + +Convolutional Neural Networks (ConvNets) are commonly developed at a fixed resource budget, and then scaled up for better accuracy if more resources are available. In this paper, we systematically study model scaling and identify that carefully balancing network depth, width, and resolution can lead to better performance. 
Based on this observation, we propose a new scaling method that uniformly scales all dimensions of depth/width/resolution using a simple yet highly effective compound coefficient. We demonstrate the effectiveness of this method on scaling up MobileNets and ResNet. + +To go even further, we use neural architecture search to design a new baseline network and scale it up to obtain a family of models, called EfficientNets, which achieve much better accuracy and efficiency than previous ConvNets. In particular, our EfficientNet-B7 achieves state-of-the-art 84.3% top-1 accuracy on ImageNet, while being 8.4x smaller and 6.1x faster on inference than the best existing ConvNet. Our EfficientNets also transfer well and achieve state-of-the-art accuracy on CIFAR-100 (91.7%), Flowers (98.8%), and 3 other transfer learning datasets, with an order of magnitude fewer parameters. + +## Results and Models + +### RetinaNet + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|Efficientnet-b3 | pytorch | 1x | - | - | 40.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco/retinanet_effb3_fpn_crop896_8x4_1x_coco_20220322_234806-615a0dda.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco/retinanet_effb3_fpn_crop896_8x4_1x_coco_20220322_234806.log.json) | + +## Citation + +```latex +@article{tan2019efficientnet, + title={Efficientnet: Rethinking model scaling for convolutional neural networks}, + author={Tan, Mingxing and Le, Quoc V}, + journal={arXiv preprint arXiv:1905.11946}, + year={2019} +} +``` diff --git a/configs/mmdet/efficientnet/metafile.yml b/configs/mmdet/efficientnet/metafile.yml new 
file mode 100644 index 00000000..de40b953 --- /dev/null +++ b/configs/mmdet/efficientnet/metafile.yml @@ -0,0 +1,19 @@ +Models: + - Name: retinanet_effb3_fpn_crop896_8x4_1x_coco + In Collection: RetinaNet + Config: configs/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco/retinanet_effb3_fpn_crop896_8x4_1x_coco_20220322_234806-615a0dda.pth + Paper: + URL: https://arxiv.org/abs/1905.11946v5 + Title: 'EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks' + README: configs/efficientnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.23.0/mmdet/models/backbones/efficientnet.py#L159 + Version: v2.23.0 diff --git a/configs/mmdet/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco.py b/configs/mmdet/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco.py new file mode 100644 index 00000000..c90bc167 --- /dev/null +++ b/configs/mmdet/efficientnet/retinanet_effb3_fpn_crop896_8x4_1x_coco.py @@ -0,0 +1,94 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] + +cudnn_benchmark = True +norm_cfg = dict(type='BN', requires_grad=True) +checkpoint = 'https://download.openmmlab.com/mmclassification/v0/efficientnet/efficientnet-b3_3rdparty_8xb32-aa_in1k_20220119-5b4887a0.pth' # noqa +model = dict( + backbone=dict( + _delete_=True, + type='EfficientNet', + arch='b3', + drop_path_rate=0.2, + out_indices=(3, 4, 5), + frozen_stages=0, + norm_cfg=dict( + type='SyncBN', requires_grad=True, eps=1e-3, momentum=0.01), + norm_eval=False, + init_cfg=dict( + type='Pretrained', prefix='backbone', checkpoint=checkpoint)), + neck=dict( + in_channels=[48, 136, 384], + start_level=0, + out_channels=256, + relu_before_extra_convs=True, + 
no_norm_on_lateral=True, + norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) + +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +img_size = (896, 896) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=img_size, + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=img_size), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=img_size), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=img_size, + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=img_size), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=4, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer_config = dict(grad_clip=None) +optimizer = dict( + type='SGD', + lr=0.04, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[8, 11]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=12) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (4 samples per GPU) +auto_scale_lr = dict(base_batch_size=32) diff --git a/configs/mmdet/empirical_attention/README.md b/configs/mmdet/empirical_attention/README.md new file mode 100644 index 00000000..ddf8194b --- /dev/null +++ b/configs/mmdet/empirical_attention/README.md @@ -0,0 +1,33 @@ +# Empirical Attention + +> [An Empirical Study of Spatial Attention Mechanisms in Deep Networks](https://arxiv.org/abs/1904.05873) + + + +## Abstract + +Attention mechanisms have become a popular component in deep neural networks, yet there has been little examination of how different influencing factors and methods for computing attention from these factors affect performance. Toward a better general understanding of attention mechanisms, we present an empirical study that ablates various spatial attention elements within a generalized attention formulation, encompassing the dominant Transformer attention as well as the prevalent deformable convolution and dynamic convolution modules. Conducted on a variety of applications, the study yields significant findings about spatial attention in deep networks, some of which run counter to conventional understanding. For example, we find that the query and key content comparison in Transformer attention is negligible for self-attention, but vital for encoder-decoder attention. A proper combination of deformable convolution with key content only saliency achieves the best accuracy-efficiency tradeoff in self-attention. Our results suggest that there exists much room for improvement in the design of attention mechanisms. + +
+ +
+ +## Results and Models + +| Backbone | Attention Component | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------------:|:----:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 1111 | N | 1x | 8.0 | 13.8 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130_210344.log.json) | +| R-50 | 0010 | N | 1x | 4.2 | 18.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130_210125.log.json) | +| R-50 | 1111 | Y | 1x | 8.0 | 12.7 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130_204442.log.json) | +| R-50 | 0010 | Y | 1x | 4.2 | 17.1 | 42.0 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130_210410.log.json) | + +## Citation + +```latex +@article{zhu2019empirical, + title={An Empirical Study of Spatial Attention Mechanisms in Deep Networks}, + author={Zhu, Xizhou and Cheng, Dazhi and Zhang, Zheng and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1904.05873}, + year={2019} +} +``` diff --git a/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py new file mode 100644 index 00000000..a544e3ab --- /dev/null +++ b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py new file mode 100644 index 00000000..bbefd27a --- /dev/null +++ b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + 
attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py new file mode 100644 index 00000000..13a4645b --- /dev/null +++ b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py new file mode 100644 index 00000000..b1f26c08 --- /dev/null +++ b/configs/mmdet/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/configs/mmdet/empirical_attention/metafile.yml b/configs/mmdet/empirical_attention/metafile.yml new file mode 100644 index 00000000..923bcb20 --- /dev/null +++ b/configs/mmdet/empirical_attention/metafile.yml @@ -0,0 +1,103 @@ +Collections: + - Name: Empirical Attention + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + 
Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + - FPN + - RPN + - ResNet + - RoIAlign + - Spatial Attention + Paper: + URL: https://arxiv.org/pdf/1904.05873 + Title: 'An Empirical Study of Spatial Attention Mechanisms in Deep Networks' + README: configs/empirical_attention/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/generalized_attention.py#L10 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_attention_1111_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth + + - Name: faster_rcnn_r50_fpn_attention_0010_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 54.35 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth + + - Name: faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 78.74 + hardware: V100 + 
backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth + + - Name: faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 58.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth diff --git a/configs/mmdet/fast_rcnn/README.md b/configs/mmdet/fast_rcnn/README.md new file mode 100644 index 00000000..dbe926d6 --- /dev/null +++ b/configs/mmdet/fast_rcnn/README.md @@ -0,0 +1,72 @@ +# Fast R-CNN + +> [Fast R-CNN](https://arxiv.org/abs/1504.08083) + + + +## Abstract + +This paper proposes a Fast Region-based Convolutional Network method (Fast R-CNN) for object detection. Fast R-CNN builds on previous work to efficiently classify object proposals using deep convolutional networks. Compared to previous work, Fast R-CNN employs several innovations to improve training and testing speed while also increasing detection accuracy. Fast R-CNN trains the very deep VGG16 network 9x faster than R-CNN, is 213x faster at test-time, and achieves a higher mAP on PASCAL VOC 2012. Compared to SPPnet, Fast R-CNN trains VGG16 3x faster, tests 10x faster, and is more accurate. + +
+ +
+ +## Introduction + +Before training the Fast R-CNN, users should first train an [RPN](../rpn/README.md), and use the RPN to extract the region proposals. + +- Firstly, extract the region proposals of the val set by this command as below: +```bash +./tools/dist_test.sh \ + configs/rpn_r50_fpn_1x_coco.py \ + checkpoints/rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth \ + 8 \ + --out proposals/rpn_r50_fpn_1x_val2017.pkl +``` + +- Then, change the `ann_file` and `img_prefix` of `data.test` in the RPN config to train set as below: + +```python +data = dict( + test=dict( + ann_file='data/coco/annotations/instances_train2017.json', + img_prefix='data/coco/train2017/')) +``` + +- Extract the region proposals of the train set by this command as below: + +```bash +./tools/dist_test.sh \ + configs/rpn_r50_fpn_1x_coco.py \ + checkpoints/rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth \ + 8 \ + --out proposals/rpn_r50_fpn_1x_train2017.pkl +``` + +- Modify the path of `proposal_file` in Fast R-CNN config as below: + +```python +data = dict( + train=dict( + proposal_file='proposals/rpn_r50_fpn_1x_train2017.pkl'), + val=dict( + proposal_file='proposals/rpn_r50_fpn_1x_val2017.pkl'), + test=dict( + proposal_file='proposals/rpn_r50_fpn_1x_val2017.pkl')) +``` + +Finally, users can start training the Fast R-CNN. 
+ +## Results and Models + +## Citation + +```latex +@inproceedings{girshick2015fast, + title={Fast r-cnn}, + author={Girshick, Ross}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2015} +} +``` diff --git a/configs/mmdet/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..3ab8e981 --- /dev/null +++ b/configs/mmdet/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './fast_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py b/configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 00000000..83852b24 --- /dev/null +++ b/configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py b/configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 00000000..c2208857 --- /dev/null +++ b/configs/mmdet/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fast_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..f1b29ef3 --- /dev/null +++ b/configs/mmdet/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,48 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + norm_cfg=dict(type='BN', requires_grad=False), + style='caffe', + init_cfg=dict( + 
type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..d2f080e9 --- /dev/null +++ b/configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/fast_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + 
dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) diff --git a/configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py b/configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 00000000..228e8564 --- /dev/null +++ b/configs/mmdet/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/faster_rcnn/README.md b/configs/mmdet/faster_rcnn/README.md new file mode 100644 index 00000000..359d0ce6 --- /dev/null +++ b/configs/mmdet/faster_rcnn/README.md @@ -0,0 +1,88 @@ +# Faster R-CNN + +> [Faster 
R-CNN: Towards Real-Time Object Detection with Region Proposal Networks](https://arxiv.org/abs/1506.01497) + + + +## Abstract + +State-of-the-art object detection networks depend on region proposal algorithms to hypothesize object locations. Advances like SPPnet and Fast R-CNN have reduced the running time of these detection networks, exposing region proposal computation as a bottleneck. In this work, we introduce a Region Proposal Network (RPN) that shares full-image convolutional features with the detection network, thus enabling nearly cost-free region proposals. An RPN is a fully convolutional network that simultaneously predicts object bounds and objectness scores at each position. The RPN is trained end-to-end to generate high-quality region proposals, which are used by Fast R-CNN for detection. We further merge RPN and Fast R-CNN into a single network by sharing their convolutional features---using the recently popular terminology of neural networks with 'attention' mechanisms, the RPN component tells the unified network where to look. For the very deep VGG-16 model, our detection system has a frame rate of 5fps (including all steps) on a GPU, while achieving state-of-the-art object detection accuracy on PASCAL VOC 2007, 2012, and MS COCO datasets with only 300 proposals per image. In ILSVRC and COCO 2015 competitions, Faster R-CNN and RPN are the foundations of the 1st-place winning entries in several tracks. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-C4 | caffe | 1x | - | - | 35.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco/faster_rcnn_r50_caffe_c4_1x_coco_20220316_150152-3f885b85.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco/faster_rcnn_r50_caffe_c4_1x_coco_20220316_150152.log.json) | +| R-50-DC5 | caffe | 1x | - | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909.log.json) | +| R-50-FPN | caffe | 1x | 3.8 | | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_20200504_180032.log.json) | +| R-50-FPN | pytorch | 1x | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN (FP16) | pytorch | 1x | 3.4 | 28.8 | 37.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204_143530.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_20200504_210434.log.json) | +| R-101-FPN | caffe | 1x | 5.7 | | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_20200504_180057.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 15.6 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130_204655.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_20200504_210455.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 13.8 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203_000520.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_20200506_041400.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 9.4 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py) 
| [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204_134340.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033.log.json) | + +## Different regression loss + +We trained with R-50-FPN pytorch style backbone for 1x schedule. + +| Backbone | Loss type | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-------: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | L1Loss | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | IoULoss | | | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco_20200506_095954.log.json) | +| R-50-FPN | GIoULoss | | | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco_20200505_161120.log.json) | +| R-50-FPN | BoundedIoULoss | | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco_20200505_160738.log.json) | + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training. The users could finetune them for downstream tasks. 
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-C4](./faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py) | caffe | 1x | - | | 35.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco/faster_rcnn_r50_caffe_c4_mstrain_1x_coco_20220316_150527-db276fed.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco/faster_rcnn_r50_caffe_c4_mstrain_1x_coco_20220316_150527.log.json) | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | caffe | 1x | - | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851.log.json) | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | caffe | 3x | - | | 38.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | 
caffe | 2x | 3.7 | | 39.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_20200504_231813.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | caffe | 3x | 3.7 | | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054-1f77628b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054.log.json) | +| [R-50-FPN](./faster_rcnn_r50_fpn_mstrain_3x_coco.py) | pytorch | 3x | 3.9 | | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822-e10bd31c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822.log.json) | +| [R-101-FPN](./faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | caffe | 3x | 5.6 | | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742-a7ae426d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742.log.json) | +| [R-101-FPN](./faster_rcnn_r101_fpn_mstrain_3x_coco.py) | pytorch | 3x | 5.8 | | 41.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822-4d4d2ca8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822.log.json) | +| [X-101-32x4d-FPN](./faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 7.0 | | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151-16b9b260.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151.log.json) | +| [X-101-32x8d-FPN](./faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 10.1 | | 42.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954-002e082a.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954.log.json) | +| [X-101-64x4d-FPN](./faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 10.0 | | 43.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528-26c63de6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528.log.json) | + +We further finetune some pre-trained models on the COCO subsets, which contain only a few of the 80 categories. + +| Backbone | Style | Class name | Pre-trained model | Mem (GB) | box AP | Config | Download | +| ------------------------------------------------------------ | ----- | ------------------ | ------------------------------------------------------------ | -------- | ------ | ------------------------------------------------------------ | ------------------------------------------------------------ | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) | caffe | person | [R-50-FPN-Caffe-3x](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | 3.7 | 55.8 | [config](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929-d022e227.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py) | caffe | person-bicycle-car | 
[R-50-FPN-Caffe-3x](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | 3.7 | 44.1 | [config](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car_20201216_173117-6eda6d92.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car_20201216_173117.log.json) | + +## Torchvision New Recipe (TNR) + +Torchvision released its high-precision ResNet models. The training details can be found on the [Pytorch website](https://pytorch.org/blog/how-to-train-state-of-the-art-models-using-torchvision-latest-primitives/). Here, we have done grid searches on learning rate and weight decay and found the optimal hyper-parameter on the detection task. + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-TNR](./faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py) | pytorch | 1x | - | | 40.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco_20220320_085147-efedfda4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco_20220320_085147.log.json) | + +## Citation + +```latex +@article{Ren_2017, + title={Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Ren, 
Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + year={2017}, + month={Jun}, +} +``` diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..c6f078c7 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..6a13fe9f --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,49 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', 
size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 00000000..1de53a6c --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 00000000..0d415994 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..0b498bb6 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 00000000..b071962e --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + 
'../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py new file mode 100644 index 00000000..f4d83e6b --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = './faster_rcnn_r50_caffe_c4_1x_coco.py' +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + 
dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py new file mode 100644 index 00000000..ee2010c6 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data 
= dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py new file mode 100644 index 00000000..14eaef2d --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py new file mode 100644 index 00000000..403747f1 --- /dev/null +++ 
b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..56c01bdc --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py new file mode 100644 index 00000000..b5aea6a7 --- /dev/null 
+++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_90k_coco.py @@ -0,0 +1,15 @@ +_base_ = 'faster_rcnn_r50_caffe_fpn_1x_coco.py' + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[60000, 80000]) + +# Runner type +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000) + +checkpoint_config = dict(interval=10000) +evaluation = dict(interval=10000, metric='bbox') diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py new file mode 100644 index 00000000..4f1f376c --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py @@ -0,0 +1,9 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=3))) +classes = ('person', 'bicycle', 'car') +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) + +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth' # noqa diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py new file mode 100644 index 00000000..b5dfb4fe --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py @@ -0,0 +1,9 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=1))) +classes = ('person', ) +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) + +load_from = 
'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth' # noqa diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 00000000..f807a19a --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,46 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py 
b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 00000000..df58973f --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..9eeaacea --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,47 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py new file mode 100644 index 00000000..74dca24f --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py @@ -0,0 +1,15 @@ +_base_ = 'faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[60000, 80000]) + +# Runner type +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000) + +checkpoint_config = dict(interval=10000) +evaluation = dict(interval=10000, metric='bbox') diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..009bd93d --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 00000000..e77a7fa8 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py new file mode 100644 index 00000000..648081f1 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + 
bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='BoundedIoULoss', loss_weight=10.0)))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py new file mode 100644 index 00000000..886d5668 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ciou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='CIoULoss', loss_weight=12.0)))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py new file mode 100644 index 00000000..acd4040c --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py new file mode 100644 index 00000000..5556c497 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py new file mode 100644 index 00000000..ddf663e4 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='IoULoss', loss_weight=10.0)))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py new 
file mode 100644 index 00000000..faf8f924 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,3 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py new file mode 100644 index 00000000..f897e7c5 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict(train_cfg=dict(rcnn=dict(sampler=dict(type='OHEMSampler')))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py new file mode 100644 index 00000000..759ae3a7 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + test_cfg=dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='soft_nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py new file mode 100644 index 00000000..ecbfb928 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +checkpoint = 'https://download.pytorch.org/models/resnet50-11ad3fa6.pth' +model = dict( + backbone=dict(init_cfg=dict(type='Pretrained', checkpoint=checkpoint))) + +# `lr` and `weight_decay` have been searched to be optimal. 
+optimizer = dict( + _delete_=True, + type='AdamW', + lr=0.0001, + weight_decay=0.1, + paramwise_cfg=dict(norm_decay_mult=0., bypass_duplicate=True)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..3808c9f2 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 00000000..e93f5d81 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..f55985d6 --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + 
norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..a5d5aebb --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +# ResNeXt-101-32x8d model trained with Caffe2 at FB, +# so the mean and std need to be changed. +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use 
RepeatDataset to speed up training +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..8bf2b65a --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 00000000..7ea9b2da --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py b/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..80397f4b --- /dev/null +++ b/configs/mmdet/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + 
frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/faster_rcnn/metafile.yml b/configs/mmdet/faster_rcnn/metafile.yml new file mode 100644 index 00000000..91d6751b --- /dev/null +++ b/configs/mmdet/faster_rcnn/metafile.yml @@ -0,0 +1,451 @@ +Collections: + - Name: Faster R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + - RoIPool + Paper: + URL: https://arxiv.org/abs/1506.01497 + Title: "Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks" + README: configs/faster_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/faster_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_caffe_c4_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco/faster_rcnn_r50_caffe_c4_1x_coco_20220316_150152-3f885b85.pth + + - Name: faster_rcnn_r50_caffe_c4_mstrain_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_c4_mstrain_1x_coco/faster_rcnn_r50_caffe_c4_mstrain_1x_coco_20220316_150527-db276fed.pth + + - Name: faster_rcnn_r50_caffe_dc5_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: 
COCO + Metrics: + box AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth + + - Name: faster_rcnn_r50_caffe_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth + + - Name: faster_rcnn_r50_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth + + - Name: faster_rcnn_r50_fpn_fp16_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 3.4 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + inference time (ms/im): + - value: 34.72 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth + + - Name: faster_rcnn_r50_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py + Metadata: 
+ Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth + + - Name: faster_rcnn_r101_caffe_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth + + - Name: faster_rcnn_r101_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 64.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth + + - Name: faster_rcnn_r101_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 64.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth + + - Name: faster_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth + + - Name: faster_rcnn_x101_32x4d_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth + + - Name: faster_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 106.38 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth + + - Name: faster_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Faster R-CNN + Config: 
configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 106.38 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth + + - Name: faster_rcnn_r50_fpn_iou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth + + - Name: faster_rcnn_r50_fpn_giou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth + + - Name: faster_rcnn_r50_fpn_bounded_iou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth + + - Name: faster_rcnn_r50_caffe_dc5_mstrain_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth + + - Name: faster_rcnn_r50_caffe_dc5_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth + + - Name: faster_rcnn_r50_caffe_fpn_mstrain_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth + + - Name: faster_rcnn_r50_caffe_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054-1f77628b.pth + + - Name: faster_rcnn_r50_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822-e10bd31c.pth + + - Name: faster_rcnn_r101_caffe_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742-a7ae426d.pth + + - Name: faster_rcnn_r101_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822-4d4d2ca8.pth + + - Name: faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 7.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151-16b9b260.pth + + - Name: faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 10.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954-002e082a.pth + + - Name: faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 10.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528-26c63de6.pth + + - Name: faster_rcnn_r50_fpn_tnr-pretrain_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco/faster_rcnn_r50_fpn_tnr-pretrain_1x_coco_20220320_085147-efedfda4.pth diff --git a/configs/mmdet/fcos/README.md b/configs/mmdet/fcos/README.md new file mode 100644 index 00000000..706fad56 --- /dev/null +++ b/configs/mmdet/fcos/README.md @@ -0,0 +1,45 @@ +# FCOS + +> [FCOS: Fully Convolutional One-Stage Object Detection](https://arxiv.org/abs/1904.01355) + + + +## Abstract + +We propose a fully convolutional one-stage object detector (FCOS) to solve object detection in a per-pixel prediction fashion, analogue to semantic segmentation. Almost all state-of-the-art object detectors such as RetinaNet, SSD, YOLOv3, and Faster R-CNN rely on pre-defined anchor boxes. In contrast, our proposed detector FCOS is anchor box free, as well as proposal free. 
By eliminating the predefined set of anchor boxes, FCOS completely avoids the complicated computation related to anchor boxes such as calculating overlapping during training. More importantly, we also avoid all hyper-parameters related to anchor boxes, which are often very sensitive to the final detection performance. With the only post-processing non-maximum suppression (NMS), FCOS with ResNeXt-64x4d-101 achieves 44.7% in AP with single-model and single-scale testing, surpassing previous one-stage detectors with the advantage of being much simpler. For the first time, we demonstrate a much simpler and flexible detection framework achieving improved detection accuracy. We hope that the proposed FCOS framework can serve as a simple and strong alternative for many other instance-level tasks. + +
+ +
+ +## Results and Models + +| Backbone | Style | GN | MS train | Tricks | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | N | N | N | 1x | 3.6 | 22.7 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/fcos_r50_caffe_fpn_gn-head_1x_coco-821213aa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/20201227_180009.log.json) | +| R-50 | caffe | Y | N | Y | N | 1x | 3.7 | - | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/20210105_135818.log.json)| +| R-50 | caffe | Y | N | Y | Y | 1x | 3.8 | - | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco-ae4d8b3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/20210105_224556.log.json)| +| R-101 | caffe | Y | N | N | N | 1x | 5.5 | 17.3 | 39.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/fcos_r101_caffe_fpn_gn-head_1x_coco-0e37b982.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/20210103_155046.log.json) | + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | Y | 2x | 2.6 | 22.9 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco-d92ceeea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/20201227_161900.log.json) | +| R-101 | caffe | Y | Y | 2x | 5.5 | 17.3 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco-511424d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/20210103_155046.log.json) | +| X-101 | pytorch | Y | Y | 2x | 10.0 | 9.7 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco-ede514a8.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/20210114_133041.log.json) | + +**Notes:** + +- The X-101 backbone is X-101-64x4d. +- Tricks means setting `norm_on_bbox`, `centerness_on_reg`, `center_sampling` as `True`. +- DCN means using `DCNv2` in both backbone and head. + +## Citation + +```latex +@article{tian2019fcos, + title={FCOS: Fully Convolutional One-Stage Object Detection}, + author={Tian, Zhi and Shen, Chunhua and Chen, Hao and He, Tong}, + journal={arXiv preprint arXiv:1904.01355}, + year={2019} +} +``` diff --git a/configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py b/configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 00000000..2699bdb9 --- /dev/null +++ b/configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +model = dict( + backbone=dict( + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=False, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + test_cfg=dict(nms=dict(type='nms', iou_threshold=0.6))) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + 
type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py b/configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py new file mode 100644 index 00000000..cf93c91e --- /dev/null +++ b/configs/mmdet/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=True, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + test_cfg=dict(nms=dict(type='nms', iou_threshold=0.6))) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 
'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/configs/mmdet/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py b/configs/mmdet/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 00000000..9f502e7b --- /dev/null +++ b/configs/mmdet/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict(bbox_head=dict(center_sampling=True, center_sample_radius=1.5)) diff --git a/configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py b/configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 00000000..45bea48d --- /dev/null +++ b/configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_caffe'))) diff --git a/configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py b/configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 00000000..f4d36f1e --- /dev/null +++ b/configs/mmdet/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,47 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + 
type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_caffe'))) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py b/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 00000000..955787ba --- /dev/null +++ b/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,106 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe')), + 
neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( 
+ lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='constant', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 00000000..2816b16f --- /dev/null +++ b/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,4 @@ +# TODO: Remove this config after benchmarking all related configs +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +data = dict(samples_per_gpu=4, workers_per_gpu=4) diff --git a/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py b/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 00000000..497d03f6 --- /dev/null +++ b/configs/mmdet/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py b/configs/mmdet/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 00000000..e70e4651 --- /dev/null +++ b/configs/mmdet/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,60 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + 
train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/fcos/metafile.yml b/configs/mmdet/fcos/metafile.yml new file mode 100644 index 00000000..ae922eb9 --- /dev/null +++ b/configs/mmdet/fcos/metafile.yml @@ -0,0 +1,146 @@ +Collections: + - Name: FCOS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Group Normalization + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.01355 + Title: 'FCOS: Fully Convolutional One-Stage Object Detection' + README: configs/fcos/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/fcos.py#L6 + Version: v2.0.0 + +Models: + - Name: fcos_r50_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 3.6 + inference time (ms/im): + - value: 44.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/fcos_r50_caffe_fpn_gn-head_1x_coco-821213aa.pth + + - Name: fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth + + - Name: fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco-ae4d8b3d.pth + + - Name: fcos_r101_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 57.8 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/fcos_r101_caffe_fpn_gn-head_1x_coco-0e37b982.pth + + - Name: fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 2.6 + inference time (ms/im): + - value: 43.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco-d92ceeea.pth + + - Name: fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: 
FCOS + Config: configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 57.8 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco-511424d6.pth + + - Name: fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco-ede514a8.pth diff --git a/configs/mmdet/foveabox/README.md b/configs/mmdet/foveabox/README.md new file mode 100644 index 00000000..7c82820e --- /dev/null +++ b/configs/mmdet/foveabox/README.md @@ -0,0 +1,53 @@ +# FoveaBox + +> [FoveaBox: Beyond Anchor-based Object Detector](https://arxiv.org/abs/1904.03797) + + + +## Abstract + +We present FoveaBox, an accurate, flexible, and completely anchor-free framework for object detection. While almost all state-of-the-art object detectors utilize predefined anchors to enumerate possible locations, scales and aspect ratios for the search of the objects, their performance and generalization ability are also limited to the design of anchors. Instead, FoveaBox directly learns the object existing possibility and the bounding box coordinates without anchor reference. 
This is achieved by: (a) predicting category-sensitive semantic maps for the object existing possibility, and (b) producing category-agnostic bounding box for each position that potentially contains an object. The scales of target boxes are naturally associated with feature pyramid representations. In FoveaBox, an instance is assigned to adjacent feature levels to make the model more accurate. We demonstrate its effectiveness on standard benchmarks and report extensive experimental analysis. Without bells and whistles, FoveaBox achieves state-of-the-art single model performance on the standard COCO and Pascal VOC object detection benchmark. More importantly, FoveaBox avoids all computation and hyper-parameters related to anchor boxes, which are often sensitive to the final detection performance. We believe the simple and effective approach will serve as a solid baseline and help ease future research for object detection. + +
+ +
+ +## Introduction + +FoveaBox is an accurate, flexible and completely anchor-free object detection system for object detection framework, as presented in our paper [https://arxiv.org/abs/1904.03797](https://arxiv.org/abs/1904.03797): +Different from previous anchor-based methods, FoveaBox directly learns the object existing possibility and the bounding box coordinates without anchor reference. This is achieved by: (a) predicting category-sensitive semantic maps for the object existing possibility, and (b) producing category-agnostic bounding box for each position that potentially contains an object. + +## Results and Models + +### Results on R50/101-FPN + +| Backbone | Style | align | ms-train| Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | N | N | 1x | 5.6 | 24.1 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219_223025.log.json) | +| R-50 | pytorch | N | N | 2x | 5.6 | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203_112043.log.json) | +| R-50 | pytorch | Y | N | 2x | 8.1 | 19.4 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203_134252.log.json) | +| R-50 | pytorch | Y | Y | 2x | 8.1 | 18.3 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205_112557.log.json) | +| R-101 | pytorch | N | N | 1x | 9.2 | 17.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219_011740.log.json) | +| R-101 | pytorch | N | N | 2x | 11.7 | - | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208_202059.log.json) | +| R-101 | pytorch | Y | N | 2x | 11.7 | 14.7 | 40.0 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208_203337.log.json) | +| R-101 | pytorch | Y | Y | 2x | 11.7 | 14.7 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208_202124.log.json) | + +[1] *1x and 2x mean the model is trained for 12 and 24 epochs, respectively.* \ +[2] *Align means utilizing deformable convolution to align the cls branch.* \ +[3] *All results are obtained with a single model and without any test time data augmentation.*\ +[4] *We use 4 GPUs for training.* + +Any pull requests or issues are welcome. + +## Citation + +Please consider citing our paper in your publications if the project helps your research. BibTeX reference is as follows. 
+ +```latex +@article{kong2019foveabox, + title={FoveaBox: Beyond Anchor-based Object Detector}, + author={Kong, Tao and Sun, Fuchun and Liu, Huaping and Jiang, Yuning and Shi, Jianbo}, + journal={arXiv preprint arXiv:1904.03797}, + year={2019} +} +``` diff --git a/configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py b/configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 00000000..c5d17849 --- /dev/null +++ b/configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,12 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 00000000..cc5affef --- /dev/null +++ b/configs/mmdet/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,29 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py b/configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 00000000..e7265bcd --- /dev/null +++ b/configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 00000000..8fc39bea --- /dev/null +++ b/configs/mmdet/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,25 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# 
learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/foveabox/fovea_r101_fpn_4x4_1x_coco.py b/configs/mmdet/foveabox/fovea_r101_fpn_4x4_1x_coco.py new file mode 100644 index 00000000..9201af11 --- /dev/null +++ b/configs/mmdet/foveabox/fovea_r101_fpn_4x4_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/foveabox/fovea_r101_fpn_4x4_2x_coco.py b/configs/mmdet/foveabox/fovea_r101_fpn_4x4_2x_coco.py new file mode 100644 index 00000000..1ef5243f --- /dev/null +++ b/configs/mmdet/foveabox/fovea_r101_fpn_4x4_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fovea_r50_fpn_4x4_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/foveabox/fovea_r50_fpn_4x4_1x_coco.py b/configs/mmdet/foveabox/fovea_r50_fpn_4x4_1x_coco.py new file mode 100644 index 00000000..7e986ebc --- /dev/null +++ b/configs/mmdet/foveabox/fovea_r50_fpn_4x4_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FOVEA', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + num_outs=5, + add_extra_convs='on_input'), + bbox_head=dict( + type='FoveaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + base_edge_list=[16, 32, 64, 128, 256], + scale_ranges=((1, 
64), (32, 128), (64, 256), (128, 512), (256, 2048)), + sigma=0.4, + with_deform=False, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=1.50, + alpha=0.4, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + # training and testing settings + train_cfg=dict(), + test_cfg=dict( + nms_pre=1000, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +data = dict(samples_per_gpu=4, workers_per_gpu=4) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/foveabox/fovea_r50_fpn_4x4_2x_coco.py b/configs/mmdet/foveabox/fovea_r50_fpn_4x4_2x_coco.py new file mode 100644 index 00000000..68ce4d25 --- /dev/null +++ b/configs/mmdet/foveabox/fovea_r50_fpn_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/foveabox/metafile.yml b/configs/mmdet/foveabox/metafile.yml new file mode 100644 index 00000000..fe9a2834 --- /dev/null +++ b/configs/mmdet/foveabox/metafile.yml @@ -0,0 +1,172 @@ +Collections: + - Name: FoveaBox + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.03797 + Title: 'FoveaBox: Beyond Anchor-based Object Detector' + README: configs/foveabox/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/fovea.py#L6 + Version: v2.0.0 + +Models: + - Name: fovea_r50_fpn_4x4_1x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + 
Dataset: COCO + Metrics: + box AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth + + - Name: fovea_r50_fpn_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth + + - Name: fovea_align_r50_fpn_gn-head_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 51.55 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth + + - Name: fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 54.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth + + - Name: fovea_r101_fpn_4x4_1x_coco + In 
Collection: FoveaBox + Config: configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 57.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth + + - Name: fovea_r101_fpn_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth + + - Name: fovea_align_r101_fpn_gn-head_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth + + - Name: fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth diff --git a/configs/mmdet/fpg/README.md b/configs/mmdet/fpg/README.md new file mode 100644 index 00000000..9d89510f --- /dev/null +++ b/configs/mmdet/fpg/README.md @@ -0,0 +1,43 @@ +# FPG + +> [Feature Pyramid Grids](https://arxiv.org/abs/2004.03580) + + + +## Abstract + +Feature pyramid networks have been widely adopted in the object detection literature to improve feature representations for better handling of variations in scale. In this paper, we present Feature Pyramid Grids (FPG), a deep multi-pathway feature pyramid, that represents the feature scale-space as a regular grid of parallel bottom-up pathways which are fused by multi-directional lateral connections. FPG can improve single-pathway feature pyramid networks by significantly increasing its performance at similar computation cost, highlighting importance of deep pyramid representations. In addition to its general and uniform structure, over complicated structures that have been found with neural architecture search, it also compares favorably against such approaches without relying on search. We hope that FPG with its uniform and effective nature can serve as a strong component for future work in object recognition. + +
+ +
+ +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. +All backbones are ResNet-50 in pytorch style. + +| Method | Neck | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------------:|:-----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:-------:|:--------:| +| Faster R-CNN | FPG | 50e | 20.0 | - | 42.3 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco_20220311_011856-74109f42.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco_20220311_011856.log.json) | +| Faster R-CNN | FPG-chn128 | 50e | 11.9 | - | 41.2 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco_20220311_011857-9376aa9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco_20220311_011857.log.json) | +| Faster R-CNN | FPN | 50e | 20.0 | - | 38.9 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpn_crop640_50e_coco/faster_rcnn_r50_fpn_crop640_50e_coco_20220311_011857-be7c9f42.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpn_crop640_50e_coco/faster_rcnn_r50_fpn_crop640_50e_coco_20220311_011857.log.json) | +| Mask R-CNN | FPG | 50e | 23.2 | - | 43.0 | 38.1 
|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco_20220311_011857-233b8334.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco_20220311_011857.log.json) | +| Mask R-CNN | FPG-chn128 | 50e | 15.3 | - | 41.7 | 37.1 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco_20220311_011859-043c9b4e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco_20220311_011859.log.json) | +| Mask R-CNN | FPN | 50e | 23.2 | - | 49.6 | 35.6 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpn_crop640_50e_coco/mask_rcnn_r50_fpn_crop640_50e_coco_20220311_011855-a756664a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpn_crop640_50e_coco/mask_rcnn_r50_fpn_crop640_50e_coco_20220311_011855.log.json) | +| RetinaNet | FPG | 50e | 20.8 | - | 40.5 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco_20220311_110809-b0bcf5f4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco_20220311_110809.log.json) | +| RetinaNet | FPG-chn128 | 50e | 19.9 | - | 39.9 | - 
|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco_20220313_104829-ee99a686.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco_20220313_104829.log.json) | + +**Note**: Chn128 means to decrease the number of channels of features and convs from 256 (default) to 128 in +Neck and BBox Head, which can greatly decrease memory consumption without sacrificing much precision. + +## Citation + +```latex +@article{chen2020feature, + title={Feature pyramid grids}, + author={Chen, Kai and Cao, Yuhang and Loy, Chen Change and Lin, Dahua and Feichtenhofer, Christoph}, + journal={arXiv preprint arXiv:2004.03580}, + year={2020} +} +``` diff --git a/configs/mmdet/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py b/configs/mmdet/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 00000000..4535034e --- /dev/null +++ b/configs/mmdet/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,9 @@ +_base_ = 'faster_rcnn_r50_fpg_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict(out_channels=128, inter_channels=128), + rpn_head=dict(in_channels=128), + roi_head=dict( + bbox_roi_extractor=dict(out_channels=128), + bbox_head=dict(in_channels=128))) diff --git a/configs/mmdet/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py b/configs/mmdet/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py new file mode 100644 index 00000000..3ab2a2c5 --- /dev/null +++ b/configs/mmdet/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,48 @@ +_base_ = 'faster_rcnn_r50_fpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + type='FPG', + in_channels=[256, 512, 1024, 2048], + 
out_channels=256, + inter_channels=256, + num_outs=5, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) diff --git a/configs/mmdet/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py b/configs/mmdet/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py new file mode 100644 index 00000000..e4ec940a --- /dev/null +++ b/configs/mmdet/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,73 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict(norm_cfg=norm_cfg, norm_eval=False), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict(bbox_head=dict(norm_cfg=norm_cfg))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', 
flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(max_epochs=50) +evaluation = dict(interval=2) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py b/configs/mmdet/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 00000000..baa4a5af --- /dev/null +++ b/configs/mmdet/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,10 @@ +_base_ = 'mask_rcnn_r50_fpg_crop640_50e_coco.py' + +model = dict( + neck=dict(out_channels=128, inter_channels=128), + rpn_head=dict(in_channels=128), + roi_head=dict( + bbox_roi_extractor=dict(out_channels=128), + bbox_head=dict(in_channels=128), + mask_roi_extractor=dict(out_channels=128), + mask_head=dict(in_channels=128))) diff --git a/configs/mmdet/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py b/configs/mmdet/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py new file mode 100644 index 00000000..3c9ea276 --- /dev/null +++ b/configs/mmdet/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,48 @@ +_base_ = 'mask_rcnn_r50_fpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + 
norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) diff --git a/configs/mmdet/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py b/configs/mmdet/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py new file mode 100644 index 00000000..c6bcc242 --- /dev/null +++ b/configs/mmdet/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict(norm_cfg=norm_cfg, norm_eval=False), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + norm_cfg=norm_cfg, + num_outs=5), + roi_head=dict( + bbox_head=dict(norm_cfg=norm_cfg), mask_head=dict(norm_cfg=norm_cfg))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + 
val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(max_epochs=50) +evaluation = dict(interval=2) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/fpg/metafile.yml b/configs/mmdet/fpg/metafile.yml new file mode 100644 index 00000000..6b0a6a79 --- /dev/null +++ b/configs/mmdet/fpg/metafile.yml @@ -0,0 +1,104 @@ +Collections: + - Name: Feature Pyramid Grids + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Feature Pyramid Grids + Paper: + URL: https://arxiv.org/abs/2004.03580 + Title: 'Feature Pyramid Grids' + README: configs/fpg/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.10.0/mmdet/models/necks/fpg.py#L101 + Version: v2.10.0 + +Models: + - Name: faster_rcnn_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 20.0 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco_20220311_011856-74109f42.pth + + - Name: faster_rcnn_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 11.9 + 
Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco_20220311_011857-9376aa9d.pth + + - Name: mask_rcnn_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 23.2 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco_20220311_011857-233b8334.pth + + - Name: mask_rcnn_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 15.3 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco_20220311_011859-043c9b4e.pth + + - Name: retinanet_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 20.8 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco_20220311_110809-b0bcf5f4.pth + + - Name: retinanet_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 19.9 + Epochs: 50 + Results: 
+ - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco_20220313_104829-ee99a686.pth diff --git a/configs/mmdet/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py b/configs/mmdet/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 00000000..9a6cf7e5 --- /dev/null +++ b/configs/mmdet/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,5 @@ +_base_ = 'retinanet_r50_fpg_crop640_50e_coco.py' + +model = dict( + neck=dict(out_channels=128, inter_channels=128), + bbox_head=dict(in_channels=128)) diff --git a/configs/mmdet/fpg/retinanet_r50_fpg_crop640_50e_coco.py b/configs/mmdet/fpg/retinanet_r50_fpg_crop640_50e_coco.py new file mode 100644 index 00000000..504ed5ec --- /dev/null +++ b/configs/mmdet/fpg/retinanet_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,53 @@ +_base_ = '../nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + _delete_=True, + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + add_extra_convs=True, + start_level=1, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 
'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) + +evaluation = dict(interval=2) diff --git a/configs/mmdet/free_anchor/README.md b/configs/mmdet/free_anchor/README.md new file mode 100644 index 00000000..e232f370 --- /dev/null +++ b/configs/mmdet/free_anchor/README.md @@ -0,0 +1,37 @@ +# FreeAnchor + +> [FreeAnchor: Learning to Match Anchors for Visual Object Detection](https://arxiv.org/abs/1909.02466) + + + +## Abstract + +Modern CNN-based object detectors assign anchors for ground-truth objects under the restriction of object-anchor Intersection-over-Union (IoU). In this study, we propose a learning-to-match approach to break IoU restriction, allowing objects to match anchors in a flexible manner. Our approach, referred to as FreeAnchor, updates hand-crafted anchor assignment to "free" anchor matching by formulating detector training as a maximum likelihood estimation (MLE) procedure. FreeAnchor targets at learning features which best explain a class of objects in terms of both classification and localization. FreeAnchor is implemented by optimizing detection customized likelihood and can be fused with CNN-based detectors in a plug-and-play manner. Experiments on COCO demonstrate that FreeAnchor consistently outperforms their counterparts with significant margins. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:--------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.9 | 18.4 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130_095625.log.json) | +| R-101 | pytorch | 1x | 6.8 | 14.9 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130_100723.log.json) | +| X-101-32x4d | pytorch | 1x | 8.1 | 11.1 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130_095627.log.json) | + +**Notes:** + +- We use 8 GPUs with 2 images/GPU. +- For more settings and models, please refer to the [official repo](https://github.com/zhangxiaosong18/FreeAnchor). 
+ +## Citation + +```latex +@inproceedings{zhang2019freeanchor, + title = {{FreeAnchor}: Learning to Match Anchors for Visual Object Detection}, + author = {Zhang, Xiaosong and Wan, Fang and Liu, Chang and Ji, Rongrong and Ye, Qixiang}, + booktitle = {Neural Information Processing Systems}, + year = {2019} +} +``` diff --git a/configs/mmdet/free_anchor/metafile.yml b/configs/mmdet/free_anchor/metafile.yml new file mode 100644 index 00000000..170fb5c0 --- /dev/null +++ b/configs/mmdet/free_anchor/metafile.yml @@ -0,0 +1,79 @@ +Collections: + - Name: FreeAnchor + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FreeAnchor + - ResNet + Paper: + URL: https://arxiv.org/abs/1909.02466 + Title: 'FreeAnchor: Learning to Match Anchors for Visual Object Detection' + README: configs/free_anchor/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/dense_heads/free_anchor_retina_head.py#L10 + Version: v2.0.0 + +Models: + - Name: retinanet_free_anchor_r50_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.9 + inference time (ms/im): + - value: 54.35 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth + + - Name: retinanet_free_anchor_r101_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.8 + inference time (ms/im): + - value: 67.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - 
Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth + + - Name: retinanet_free_anchor_x101_32x4d_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 90.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth diff --git a/configs/mmdet/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py b/configs/mmdet/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py new file mode 100644 index 00000000..f4aea53c --- /dev/null +++ b/configs/mmdet/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py b/configs/mmdet/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py new file mode 100644 index 00000000..28f983c2 --- /dev/null +++ b/configs/mmdet/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py @@ -0,0 +1,22 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='FreeAnchorRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + 
bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.75))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..65f8a9e2 --- /dev/null +++ b/configs/mmdet/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/fsaf/README.md b/configs/mmdet/fsaf/README.md new file mode 100644 index 00000000..64976c57 --- /dev/null +++ b/configs/mmdet/fsaf/README.md @@ -0,0 +1,57 @@ +# FSAF + +> [Feature Selective Anchor-Free Module for Single-Shot Object Detection](https://arxiv.org/abs/1903.00621) + + + +## Abstract + +We motivate and present feature selective anchor-free (FSAF) module, a simple and effective building block for single-shot object detectors. It can be plugged into single-shot detectors with feature pyramid structure. The FSAF module addresses two limitations brought up by the conventional anchor-based detection: 1) heuristic-guided feature selection; 2) overlap-based anchor sampling. The general concept of the FSAF module is online feature selection applied to the training of multi-level anchor-free branches. Specifically, an anchor-free branch is attached to each level of the feature pyramid, allowing box encoding and decoding in the anchor-free manner at an arbitrary level. 
During training, we dynamically assign each instance to the most suitable feature level. At the time of inference, the FSAF module can work jointly with anchor-based branches by outputting predictions in parallel. We instantiate this concept with simple implementations of anchor-free branches and online feature selection strategy. Experimental results on the COCO detection track show that our FSAF module performs better than anchor-based counterparts while being faster. When working jointly with anchor-based branches, the FSAF module robustly improves the baseline RetinaNet by a large margin under various settings, while introducing nearly free inference overhead. And the resulting best model can achieve a state-of-the-art 44.6% mAP, outperforming all existing single-shot detectors on COCO. + +
+ +
+ +## Introduction + +FSAF is an anchor-free method published in CVPR2019 ([https://arxiv.org/pdf/1903.00621.pdf](https://arxiv.org/pdf/1903.00621.pdf)). +Actually it is equivalent to the anchor-based method with only one anchor at each feature map position in each FPN level. +And this is how we implemented it. +Only the anchor-free branch is released for its better compatibility with the current framework and less computational budget. + +In the original paper, feature maps within the central 0.2-0.5 area of a gt box are tagged as ignored. However, +it is empirically found that a hard threshold (0.2-0.2) gives a further gain on the performance. (see the table below) + +## Results and Models + +### Results on R50/R101/X101-FPN + +| Backbone | ignore range | ms-train| Lr schd |Train Mem (GB)| Train time (s/iter) | Inf time (fps) | box AP | Config | Download | +|:----------:| :-------: |:-------:|:-------:|:------------:|:---------------:|:--------------:|:-------------:|:------:|:--------:| +| R-50 | 0.2-0.5 | N | 1x | 3.15 | 0.43 | 12.3 | 36.0 (35.9) | | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715-b555b0e0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715_094657.log.json) | +| R-50 | 0.2-0.2 | N | 1x | 3.15 | 0.43 | 13.0 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco_20200428_072327.log.json)| +| R-101 | 0.2-0.2 | N | 1x | 5.08 | 0.58 | 10.8 | 39.3 (37.9) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r101_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco_20200428_160348.log.json)| +| X-101 | 0.2-0.2 | N | 1x | 9.38 | 1.23 | 5.6 | 42.4 (41.0) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco_20200428_160424.log.json)| + +**Notes:** + +- *1x means the model is trained for 12 epochs.* +- *AP values in the brackets represent those reported in the original paper.* +- *All results are obtained with a single model and single-scale test.* +- *X-101 backbone represents ResNext-101-64x4d.* +- *All pretrained backbones use pytorch style.* +- *All models are trained on 8 Titan-XP gpus and tested on a single gpu.* + +## Citation + +BibTeX reference is as follows. 
+ +```latex +@inproceedings{zhu2019feature, + title={Feature Selective Anchor-Free Module for Single-Shot Object Detection}, + author={Zhu, Chenchen and He, Yihui and Savvides, Marios}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={840--849}, + year={2019} +} +``` diff --git a/configs/mmdet/fsaf/fsaf_r101_fpn_1x_coco.py b/configs/mmdet/fsaf/fsaf_r101_fpn_1x_coco.py new file mode 100644 index 00000000..12b49fed --- /dev/null +++ b/configs/mmdet/fsaf/fsaf_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/fsaf/fsaf_r50_fpn_1x_coco.py b/configs/mmdet/fsaf/fsaf_r50_fpn_1x_coco.py new file mode 100644 index 00000000..67f3ec1c --- /dev/null +++ b/configs/mmdet/fsaf/fsaf_r50_fpn_1x_coco.py @@ -0,0 +1,48 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + type='FSAF', + bbox_head=dict( + type='FSAFHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + reg_decoded_bbox=True, + # Only anchor-free branch is implemented. The anchor generator only + # generates 1 anchor at each feature point, as a substitute of the + # grid of features. 
+ anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=1, + scales_per_octave=1, + ratios=[1.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(_delete_=True, type='TBLRBBoxCoder', normalizer=4.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='none'), + loss_bbox=dict( + _delete_=True, + type='IoULoss', + eps=1e-6, + loss_weight=1.0, + reduction='none')), + # training and testing settings + train_cfg=dict( + assigner=dict( + _delete_=True, + type='CenterRegionAssigner', + pos_scale=0.2, + neg_scale=0.2, + min_pos_iof=0.01), + allowed_border=-1, + pos_weight=-1, + debug=False)) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=10, norm_type=2)) diff --git a/configs/mmdet/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..89c0c634 --- /dev/null +++ b/configs/mmdet/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/fsaf/metafile.yml b/configs/mmdet/fsaf/metafile.yml new file mode 100644 index 00000000..5434e9ad --- /dev/null +++ b/configs/mmdet/fsaf/metafile.yml @@ -0,0 +1,80 @@ +Collections: + - Name: FSAF + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x Titan-XP GPUs + Architecture: + - FPN + - FSAF + - ResNet + Paper: + URL: https://arxiv.org/abs/1903.00621 + Title: 'Feature Selective Anchor-Free Module for Single-Shot Object Detection' + README: configs/fsaf/README.md + Code: + 
URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/detectors/fsaf.py#L6 + Version: v2.1.0 + +Models: + - Name: fsaf_r50_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.15 + inference time (ms/im): + - value: 76.92 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth + + - Name: fsaf_r101_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.08 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.3 (37.9) + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth + + - Name: fsaf_x101_64x4d_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.38 + inference time (ms/im): + - value: 178.57 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 (41.0) + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth diff --git a/configs/mmdet/gcnet/README.md b/configs/mmdet/gcnet/README.md new file mode 100644 index 00000000..4d167831 --- /dev/null +++ b/configs/mmdet/gcnet/README.md @@ -0,0 +1,69 @@ +# GCNet + +> [GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond](https://arxiv.org/abs/1904.11492) + + + +## Abstract + +The Non-Local Network (NLNet) 
presents a pioneering approach for capturing long-range dependencies, via aggregating query-specific global context to each query position. However, through a rigorous empirical analysis, we have found that the global contexts modeled by non-local network are almost the same for different query positions within an image. In this paper, we take advantage of this finding to create a simplified network based on a query-independent formulation, which maintains the accuracy of NLNet but with significantly less computation. We further observe that this simplified design shares similar structure with Squeeze-Excitation Network (SENet). Hence we unify them into a three-step general framework for global context modeling. Within the general framework, we design a better instantiation, called the global context (GC) block, which is lightweight and can effectively model the global context. The lightweight property allows us to apply it for multiple layers in a backbone network to construct a global context network (GCNet), which generally outperforms both simplified NLNet and SENet on major benchmarks for various recognition tasks. + +
+ +
+ +## Introduction + +By [Yue Cao](http://yue-cao.me), [Jiarui Xu](http://jerryxu.net), [Stephen Lin](https://scholar.google.com/citations?user=c3PYmxUAAAAJ&hl=en), Fangyun Wei, [Han Hu](https://sites.google.com/site/hanhushomepage/). + +We provide config files to reproduce the results in the paper for +["GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond"](https://arxiv.org/abs/1904.11492) on COCO object detection. + +**GCNet** is initially described in [arxiv](https://arxiv.org/abs/1904.11492). Via absorbing advantages of Non-Local Networks (NLNet) and Squeeze-Excitation Networks (SENet), GCNet provides a simple, fast and effective approach for global context modeling, which generally outperforms both NLNet and SENet on major benchmarks for various recognition tasks. + +## Results and Models + +The results on COCO 2017val are shown in the below table. + +| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | | 39.7 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.0 | 39.9 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204_024626.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 11.4 | 41.3 | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205_192835.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.6 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206_112128.log.json) | + +| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :-------: | +| R-50-FPN | Mask | - | 1x | 4.4 | 16.6 | 38.4 | 34.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202_214122.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | 15.5 | 40.4 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202_174907.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.1 | 40.7 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-101-FPN | Mask | - | 1x | 6.4 | 13.3 | 40.5 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210_220422.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 12.0 | 42.2 | 37.8 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207_015330.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.8 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| X-101-FPN | Mask | - | 1x | 7.6 | 11.3 | 42.4 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211_054326.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r16) | 1x | 8.8 | 9.8 | 43.5 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_164715.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r4) | 1x | 9.0 | 9.7 | 43.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212_070942.log.json) | +| X-101-FPN | Cascade Mask | - | 1x | 9.2 | 8.4 | 44.7 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310_115217.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r16) | 1x | 10.3 | 7.7 | 46.2 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_184154.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r4) | 1x | 10.6 | | 46.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653.log.json) | +| X-101-FPN | DCN Cascade Mask | - | 1x | | | 47.5 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019-abbc39ea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019.log.json)| +| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r16) | 1x | | | 48.0 | 41.3 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648-44aa598a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648.log.json) | +| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r4) | 1x | | | 47.9 | 41.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851-720338ec.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851.log.json) | + +**Notes:** + +- The `SyncBN` is added in the backbone for all models in **Table 2**. +- `GC` denotes Global Context (GC) block is inserted after 1x1 conv of backbone. +- `DCN` denotes replace 3x3 conv with 3x3 Deformable Convolution in `c3-c5` stages of backbone. +- `r4` and `r16` denote ratio 4 and ratio 16 in GC block respectively. 
+ +## Citation + +```latex +@article{cao2019GCNet, + title={GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond}, + author={Cao, Yue and Xu, Jiarui and Lin, Stephen and Wei, Fangyun and Hu, Han}, + journal={arXiv preprint arXiv:1904.11492}, + year={2019} +} +``` diff --git a/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 00000000..5118895f --- /dev/null +++ b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..413499dd --- /dev/null +++ b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..50689aad --- /dev/null +++ b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..13672312 --- /dev/null +++ b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..50883ffe --- /dev/null +++ b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..31fdd070 --- /dev/null +++ b/configs/mmdet/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..ad6ad476 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..29f91674 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 00000000..6e1c5d0c --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..781dba78 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..32972de8 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..d299b69f --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..5ac908e6 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 00000000..0308a567 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..e04780c5 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..980f8191 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 00000000..f0c96e58 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..7fb8e82e --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 00000000..b1ddbee3 --- /dev/null +++ b/configs/mmdet/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/configs/mmdet/gcnet/metafile.yml b/configs/mmdet/gcnet/metafile.yml new file mode 100644 index 00000000..1281122a --- /dev/null +++ b/configs/mmdet/gcnet/metafile.yml @@ -0,0 +1,440 @@ +Collections: + - Name: GCNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Global Context Block + - FPN + - RPN + - ResNet + - ResNeXt + Paper: + URL: https://arxiv.org/abs/1904.11492 + Title: 'GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond' + README: configs/gcnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/ops/context_block.py#L13 + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth + + - Name: mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth + + - Name: 
mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 87.72 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth + + - Name: mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + inference time (ms/im): + - value: 86.21 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 60.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth + + - Name: 
mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + inference time (ms/im): + - value: 64.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.23 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 75.19 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 83.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + inference time (ms/im): + - value: 84.75 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + 
Metrics: + box AP: 42.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 8.8 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference 
time (ms/im): + - value: 119.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 10.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco + In 
Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019-abbc39ea.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648-44aa598a.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851-720338ec.pth diff --git a/configs/mmdet/gfl/README.md b/configs/mmdet/gfl/README.md new file mode 100644 index 00000000..2a8e60a6 --- /dev/null +++ 
b/configs/mmdet/gfl/README.md @@ -0,0 +1,42 @@ +# GFL + +> [Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection](https://arxiv.org/abs/2006.04388) + + + +## Abstract + +One-stage detector basically formulates object detection as dense classification and localization. The classification is usually optimized by Focal Loss and the box location is commonly learned under Dirac delta distribution. A recent trend for one-stage detectors is to introduce an individual prediction branch to estimate the quality of localization, where the predicted quality facilitates the classification to improve detection performance. This paper delves into the representations of the above three fundamental elements: quality estimation, classification and localization. Two problems are discovered in existing practices, including (1) the inconsistent usage of the quality estimation and classification between training and inference and (2) the inflexible Dirac delta distribution for localization when there is ambiguity and uncertainty in complex scenes. To address the problems, we design new representations for these elements. Specifically, we merge the quality estimation into the class prediction vector to form a joint representation of localization quality and classification, and use a vector to represent arbitrary distribution of box locations. The improved representations eliminate the inconsistency risk and accurately depict the flexible distribution in real data, but contain continuous labels, which is beyond the scope of Focal Loss. We then propose Generalized Focal Loss (GFL) that generalizes Focal Loss from its discrete form to the continuous version for successful optimization. On COCO test-dev, GFL achieves 45.0\% AP using ResNet-101 backbone, surpassing state-of-the-art SAPD (43.5\%) and ATSS (43.6\%) with higher or comparable inference speed, under the same backbone and training settings. 
Notably, our best model can achieve a single-model single-scale AP of 48.2\%, at 10 FPS on a single 2080Ti GPU. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Multi-scale Training| Inf time (fps) | box AP | Config | Download | +|:-----------------:|:-------:|:-------:|:-------------------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | No | 19.5 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244.log.json) | +| R-50 | pytorch | 2x | Yes | 19.5 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802.log.json) | +| R-101 | pytorch | 2x | Yes | 14.7 | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126.log.json) | +| R-101-dcnv2 | pytorch | 2x | Yes | 12.9 | 47.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002.log.json) | +| X-101-32x4d | pytorch | 2x | Yes | 12.1 | 45.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002.log.json) | +| X-101-32x4d-dcnv2 | pytorch | 2x | Yes | 10.7 | 48.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002.log.json) | + +[1] *1x and 2x mean the model is trained for 90K and 180K iterations, respectively.* \ +[2] *All results are obtained with a single model and without any test time data augmentation such as multi-scale, flipping and etc..* \ +[3] *`dcnv2` denotes deformable convolutional networks v2.* \ +[4] *FPS is tested with a single GeForce RTX 2080Ti GPU, using a batch size of 1.* + +## Citation + +We provide config files to reproduce the object detection results in the paper [Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection](https://arxiv.org/abs/2006.04388) + +```latex +@article{li2020generalized, + title={Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection}, + author={Li, Xiang and Wang, 
Wenhai and Wu, Lijun and Chen, Shuo and Hu, Xiaolin and Li, Jun and Tang, Jinhui and Yang, Jian}, + journal={arXiv preprint arXiv:2006.04388}, + year={2020} +} +``` diff --git a/configs/mmdet/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py b/configs/mmdet/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 00000000..b72c2b6e --- /dev/null +++ b/configs/mmdet/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/gfl/gfl_r101_fpn_mstrain_2x_coco.py b/configs/mmdet/gfl/gfl_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 00000000..e33b5c0d --- /dev/null +++ b/configs/mmdet/gfl/gfl_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/gfl/gfl_r50_fpn_1x_coco.py b/configs/mmdet/gfl/gfl_r50_fpn_1x_coco.py new file mode 100644 index 00000000..cfd4b023 --- /dev/null +++ b/configs/mmdet/gfl/gfl_r50_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='GFL', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', 
requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='GFLHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/gfl/gfl_r50_fpn_mstrain_2x_coco.py b/configs/mmdet/gfl/gfl_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 00000000..b8be6014 --- /dev/null +++ b/configs/mmdet/gfl/gfl_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,22 @@ +_base_ = './gfl_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/configs/mmdet/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py b/configs/mmdet/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py new file mode 100644 index 00000000..25398075 --- /dev/null +++ b/configs/mmdet/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py @@ -0,0 +1,18 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, False, True, True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py b/configs/mmdet/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 00000000..effda195 --- /dev/null +++ b/configs/mmdet/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/gfl/metafile.yml b/configs/mmdet/gfl/metafile.yml new file mode 100644 index 00000000..8f049c6b --- /dev/null +++ b/configs/mmdet/gfl/metafile.yml @@ -0,0 +1,134 @@ +Collections: + - Name: Generalized Focal Loss + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Generalized 
Focal Loss + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/2006.04388 + Title: 'Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection' + README: configs/gfl/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/detectors/gfl.py#L6 + Version: v2.2.0 + +Models: + - Name: gfl_r50_fpn_1x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r50_fpn_1x_coco.py + Metadata: + inference time (ms/im): + - value: 51.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth + + - Name: gfl_r50_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 51.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth + + - Name: gfl_r101_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth + + - Name: gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: 
configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth + + - Name: gfl_x101_32x4d_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth + + - Name: gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 93.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth diff --git a/configs/mmdet/ghm/README.md b/configs/mmdet/ghm/README.md new file mode 100644 index 00000000..6a8e99e5 --- /dev/null +++ b/configs/mmdet/ghm/README.md @@ -0,0 +1,33 @@ +# GHM + +> [Gradient Harmonized Single-stage Detector](https://arxiv.org/abs/1811.05181) + + + +## Abstract + +Despite the great success of two-stage detectors, single-stage detector is still a more 
elegant and efficient way, yet suffers from the two well-known disharmonies during training, i.e. the huge difference in quantity between positive and negative examples as well as between easy and hard examples. In this work, we first point out that the essential effect of the two disharmonies can be summarized in terms of the gradient. Further, we propose a novel gradient harmonizing mechanism (GHM) to be a hedging for the disharmonies. The philosophy behind GHM can be easily embedded into both classification loss function like cross-entropy (CE) and regression loss function like smooth-L1 (SL1) loss. To this end, two novel loss functions called GHM-C and GHM-R are designed to balance the gradient flow for anchor classification and bounding box refinement, respectively. Ablation study on MS COCO demonstrates that without laborious hyper-parameter tuning, both GHM-C and GHM-R can bring substantial improvement for single-stage detector. Without any whistles and bells, our model achieves 41.6 mAP on COCO test-dev set which surpasses the state-of-the-art method, Focal Loss (FL) + SL1, by 0.8. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 4.0 | 3.3 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130_004213.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 4.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130_145259.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 5.1 | 40.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131_113653.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 5.2 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131_113723.log.json) | + +## Citation + +```latex +@inproceedings{li2019gradient, + title={Gradient Harmonized Single-stage Detector}, + author={Li, Buyu and Liu, Yu and Wang, Xiaogang}, + booktitle={AAAI Conference on Artificial Intelligence}, + year={2019} +} +``` diff --git a/configs/mmdet/ghm/metafile.yml b/configs/mmdet/ghm/metafile.yml new file mode 100644 index 00000000..b4f488c4 --- /dev/null +++ b/configs/mmdet/ghm/metafile.yml @@ -0,0 +1,101 @@ +Collections: + - Name: GHM + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - GHM-C + - GHM-R + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1811.05181 + Title: 'Gradient Harmonized Single-stage Detector' + README: configs/ghm/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/losses/ghm_loss.py#L21 + Version: v2.0.0 + +Models: + - Name: retinanet_ghm_r50_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 303.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth + + - Name: retinanet_ghm_r101_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 227.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + 
Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth + + - Name: retinanet_ghm_x101_32x4d_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 196.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth + + - Name: retinanet_ghm_x101_64x4d_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 192.31 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth diff --git a/configs/mmdet/ghm/retinanet_ghm_r101_fpn_1x_coco.py b/configs/mmdet/ghm/retinanet_ghm_r101_fpn_1x_coco.py new file mode 100644 index 00000000..aaf6fc26 --- /dev/null +++ b/configs/mmdet/ghm/retinanet_ghm_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/ghm/retinanet_ghm_r50_fpn_1x_coco.py b/configs/mmdet/ghm/retinanet_ghm_r50_fpn_1x_coco.py new file mode 100644 index 00000000..61b97510 --- /dev/null +++ b/configs/mmdet/ghm/retinanet_ghm_r50_fpn_1x_coco.py @@ -0,0 +1,19 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = 
dict( + bbox_head=dict( + loss_cls=dict( + _delete_=True, + type='GHMC', + bins=30, + momentum=0.75, + use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict( + _delete_=True, + type='GHMR', + mu=0.02, + bins=10, + momentum=0.7, + loss_weight=10.0))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..cd2e4cc3 --- /dev/null +++ b/configs/mmdet/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..b6107d8c --- /dev/null +++ b/configs/mmdet/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/gn+ws/README.md b/configs/mmdet/gn+ws/README.md new file mode 100644 index 00000000..7f1dec11 --- /dev/null +++ b/configs/mmdet/gn+ws/README.md @@ -0,0 +1,54 @@ +# GN + WS + +> [Weight Standardization](https://arxiv.org/abs/1903.10520) + + + +## Abstract + +Batch Normalization (BN) has become an out-of-box technique to improve deep network training. 
However, its effectiveness is limited for micro-batch training, i.e., each GPU typically has only 1-2 images for training, which is inevitable for many computer vision tasks, e.g., object detection and semantic segmentation, constrained by memory consumption. To address this issue, we propose Weight Standardization (WS) and Batch-Channel Normalization (BCN) to bring two success factors of BN into micro-batch training: 1) the smoothing effects on the loss landscape and 2) the ability to avoid harmful elimination singularities along the training trajectory. WS standardizes the weights in convolutional layers to smooth the loss landscape by reducing the Lipschitz constants of the loss and the gradients; BCN combines batch and channel normalizations and leverages estimated statistics of the activations in convolutional layers to keep networks away from elimination singularities. We validate WS and BCN on comprehensive computer vision tasks, including image classification, object detection, instance segmentation, video recognition and semantic segmentation. All experimental results consistently show that WS and BCN improve micro-batch training significantly. Moreover, using WS and BCN with micro-batch training is even able to match or outperform the performances of BN with large-batch training. + +
+ +
+ +## Results and Models + +Faster R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 1x | 5.9 | 11.7 | 39.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130_210936.log.json) | +| R-101-FPN | pytorch | GN+WS | 1x | 8.9 | 9.0 | 41.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205_232146.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 1x | 7.0 | 10.3 | 40.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203_220113.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 1x | 10.8 | 7.6 | 42.1 | - | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212_195302.log.json) | + +Mask R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:---------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 2x | 7.3 | 10.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226_062128.log.json) | +| R-101-FPN | pytorch | GN+WS | 2x | 10.3 | 8.6 | 42.0 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212_213627.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 2x | 8.4 | 9.3 | 41.1 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216_201500.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 2x | 12.2 | 7.1 | 42.1 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319_104101.log.json) | +| R-50-FPN | pytorch | GN+WS | 20-23-24e | 7.3 | - | 41.1 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213_035123.log.json) | +| R-101-FPN | pytorch | GN+WS | 20-23-24e | 10.3 | - | 43.1 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213_130142.log.json) | 
+| X-50-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 8.4 | - | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226_093732.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 12.2 | - | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316_013741.log.json) | + +Note: + +- GN+WS requires about 5% more memory than GN, and it is only 5% slower than GN. +- In the paper, a 20-23-24e lr schedule is used instead of 2x. +- The X-50-GN and X-101-GN pretrained models are also shared by the authors. 
+ +## Citation + +```latex +@article{weightstandardization, + author = {Siyuan Qiao and Huiyu Wang and Chenxi Liu and Wei Shen and Alan Yuille}, + title = {Weight Standardization}, + journal = {arXiv preprint arXiv:1903.10520}, + year = {2019}, +} +``` diff --git a/configs/mmdet/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py b/configs/mmdet/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 00000000..cd2cb2b6 --- /dev/null +++ b/configs/mmdet/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet101_gn_ws'))) diff --git a/configs/mmdet/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py b/configs/mmdet/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 00000000..1b326b88 --- /dev/null +++ b/configs/mmdet/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet50_gn_ws')), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg))) diff --git a/configs/mmdet/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py b/configs/mmdet/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 00000000..f64ae891 --- /dev/null +++ b/configs/mmdet/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + 
type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext101_32x4d_gn_ws'))) diff --git a/configs/mmdet/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py b/configs/mmdet/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 00000000..246851b9 --- /dev/null +++ b/configs/mmdet/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext50_32x4d_gn_ws'))) diff --git a/configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py b/configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 00000000..a790d932 --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py b/configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 00000000..a9fa6a24 --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet101_gn_ws'))) diff --git 
a/configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py b/configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 00000000..55168085 --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py b/configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 00000000..63be60ff --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,20 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet50_gn_ws')), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg), + mask_head=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 00000000..cfa14c99 --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py 
b/configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 00000000..6498b03f --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,19 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext101_32x4d_gn_ws'))) diff --git a/configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 00000000..79ce0adf --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py b/configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 00000000..7fac3175 --- /dev/null +++ b/configs/mmdet/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,19 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext50_32x4d_gn_ws'))) diff --git a/configs/mmdet/gn+ws/metafile.yml 
b/configs/mmdet/gn+ws/metafile.yml new file mode 100644 index 00000000..bc89359c --- /dev/null +++ b/configs/mmdet/gn+ws/metafile.yml @@ -0,0 +1,263 @@ +Collections: + - Name: Weight Standardization + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + - Weight Standardization + Paper: + URL: https://arxiv.org/abs/1903.10520 + Title: 'Weight Standardization' + README: configs/gn+ws/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 5.9 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth + + - Name: faster_rcnn_r101_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 8.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth + + - Name: faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: 
configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 97.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth + + - Name: faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 131.58 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth + + - Name: mask_rcnn_r50_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth + + - Name: mask_rcnn_r101_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time 
(ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth + + - Name: mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 8.4 + inference time (ms/im): + - value: 107.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth + + - Name: mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 12.2 + inference time (ms/im): + - value: 140.85 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth + + - Name: mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + 
Training Memory (GB): 7.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth + + - Name: mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 10.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth + + - Name: mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 8.4 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 12.2 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth diff --git a/configs/mmdet/gn/README.md b/configs/mmdet/gn/README.md new file mode 100644 index 00000000..36602faf --- /dev/null +++ b/configs/mmdet/gn/README.md @@ -0,0 +1,41 @@ +# GN + +> [Group Normalization](https://arxiv.org/abs/1803.08494) + + + +## Abstract + +Batch Normalization (BN) is a milestone technique in the development of deep learning, enabling various networks to train. However, normalizing along the batch dimension introduces problems --- BN's error increases rapidly when the batch size becomes smaller, caused by inaccurate batch statistics estimation. This limits BN's usage for training larger models and transferring features to computer vision tasks including detection, segmentation, and video, which require small batches constrained by memory consumption. In this paper, we present Group Normalization (GN) as a simple alternative to BN. GN divides the channels into groups and computes within each group the mean and variance for normalization. GN's computation is independent of batch sizes, and its accuracy is stable in a wide range of batch sizes. On ResNet-50 trained in ImageNet, GN has 10.6% lower error than its BN counterpart when using a batch size of 2; when using typical batch sizes, GN is comparably good with BN and outperforms other normalization variants. Moreover, GN can be naturally transferred from pre-training to fine-tuning. GN can outperform its BN-based counterparts for object detection and segmentation in COCO, and for video classification in Kinetics, showing that GN can effectively replace the powerful BN in a variety of tasks. GN can be easily implemented by a few lines of code in modern libraries. + +
+ +
+ +## Results and Models + +| Backbone | model | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN (d) | Mask R-CNN | 2x | 7.1 | 11.0 | 40.2 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206_050355.log.json) | +| R-50-FPN (d) | Mask R-CNN | 3x | 7.1 | - | 40.5 | 36.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214_063512.log.json) | +| R-101-FPN (d) | Mask R-CNN | 2x | 9.9 | 9.0 | 41.9 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205_234402.log.json) | +| R-101-FPN (d) | Mask R-CNN | 3x | 9.9 | | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609.log.json) | +| R-50-FPN (c) | Mask R-CNN | 2x | 7.1 | 10.9 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207_225832.log.json) | +| R-50-FPN (c) | Mask R-CNN | 3x | 7.1 | - | 40.1 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225_235135.log.json) | + +**Notes:** + +- (d) means pretrained model converted from Detectron, and (c) means the contributed model pretrained by [@thangvubk](https://github.com/thangvubk). +- The `3x` schedule is epoch [28, 34, 36]. 
+- **Memory, Train/Inf time is outdated.** + +## Citation + +```latex +@inproceedings{wu2018group, + title={Group Normalization}, + author={Wu, Yuxin and He, Kaiming}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + year={2018} +} +``` diff --git a/configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py b/configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py new file mode 100644 index 00000000..a505ba0e --- /dev/null +++ b/configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_gn'))) diff --git a/configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py b/configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py new file mode 100644 index 00000000..12a9d17e --- /dev/null +++ b/configs/mmdet/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r101_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py new file mode 100644 index 00000000..1de7d98e --- /dev/null +++ b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py @@ -0,0 +1,49 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_gn')), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + 
dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py new file mode 100644 index 00000000..f9177196 --- /dev/null +++ b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py new file mode 100644 index 00000000..2f430fda --- /dev/null +++ b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://contrib/resnet50_gn')), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + 
type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py new file mode 100644 index 00000000..66834f08 --- /dev/null +++ b/configs/mmdet/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/gn/metafile.yml b/configs/mmdet/gn/metafile.yml new file mode 100644 index 00000000..4a1ecae0 --- /dev/null +++ b/configs/mmdet/gn/metafile.yml @@ -0,0 +1,162 @@ +Collections: + - Name: Group Normalization + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + Paper: + URL: https://arxiv.org/abs/1803.08494 + Title: 'Group Normalization' + README: configs/gn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_fpn_gn-all_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth + + - Name: 
mask_rcnn_r50_fpn_gn-all_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth + + - Name: mask_rcnn_r101_fpn_gn-all_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth + + - Name: mask_rcnn_r101_fpn_gn-all_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth + + - Name: mask_rcnn_r50_fpn_gn-all_contrib_2x_coco + In Collection: Group Normalization + Config: 
configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth + + - Name: mask_rcnn_r50_fpn_gn-all_contrib_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth diff --git a/configs/mmdet/grid_rcnn/README.md b/configs/mmdet/grid_rcnn/README.md new file mode 100644 index 00000000..9b27c96e --- /dev/null +++ b/configs/mmdet/grid_rcnn/README.md @@ -0,0 +1,47 @@ +# Grid R-CNN + +> [Grid R-CNN](https://arxiv.org/abs/1811.12030) + + + +## Abstract + +This paper proposes a novel object detection framework named Grid R-CNN, which adopts a grid guided localization mechanism for accurate object detection. Different from the traditional regression based methods, the Grid R-CNN captures the spatial information explicitly and enjoys the position sensitive property of fully convolutional architecture. 
Instead of using only two independent points, we design a multi-point supervision formulation to encode more clues in order to reduce the impact of inaccurate prediction of specific points. To take the full advantage of the correlation of points in a grid, we propose a two-stage information fusion strategy to fuse feature maps of neighbor grid points. The grid guided localization approach is easy to be extended to different state-of-the-art detection frameworks. Grid R-CNN leads to high quality object localization, and experiments demonstrate that it achieves a 4.1% AP gain at IoU=0.8 and a 10.0% AP gain at IoU=0.9 on COCO benchmark compared to Faster R-CNN with Res50 backbone and FPN architecture. + +Grid R-CNN is a well-performed objection detection framework. It transforms the traditional box offset regression problem into a grid point estimation problem. With the guidance of the grid points, it can obtain high-quality localization results. However, the speed of Grid R-CNN is not so satisfactory. In this technical report we present Grid R-CNN Plus, a better and faster version of Grid R-CNN. We have made several updates that significantly speed up the framework and simultaneously improve the accuracy. On COCO dataset, the Res50-FPN based Grid R-CNN Plus detector achieves an mAP of 40.4%, outperforming the baseline on the same model by 3.0 points with similar inference time. + +
+ +
+ +## Results and Models + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 2x | 5.1 | 15.0 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130_221140.log.json) | +| R-101 | 2x | 7.0 | 12.6 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309_164224.log.json) | +| X-101-32x4d | 2x | 8.3 | 10.8 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130_215413.log.json) | +| X-101-64x4d | 2x | 11.3 | 7.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204_080641.log.json) | + +**Notes:** + +- All models are trained with 8 GPUs instead of 32 GPUs in the original paper. +- The warming up lasts for 1 epoch and `2x` here indicates 25 epochs. + +## Citation + +```latex +@inproceedings{lu2019grid, + title={Grid r-cnn}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} + +@article{lu2019grid, + title={Grid R-CNN Plus: Faster and Better}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + journal={arXiv preprint arXiv:1906.05688}, + year={2019} +} +``` diff --git a/configs/mmdet/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py b/configs/mmdet/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py new file mode 100644 index 00000000..1bb5889b --- /dev/null +++ b/configs/mmdet/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py b/configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py new file mode 100644 index 00000000..4aa00ece --- /dev/null +++ b/configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = ['grid_rcnn_r50_fpn_gn-head_2x_coco.py'] +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +checkpoint_config = dict(interval=1) +# runtime settings +runner = 
dict(type='EpochBasedRunner', max_epochs=12) diff --git a/configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py b/configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py new file mode 100644 index 00000000..df63cd5d --- /dev/null +++ b/configs/mmdet/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py @@ -0,0 +1,131 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='GridRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='GridRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + with_reg=False, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False), + grid_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + 
grid_head=dict( + type='GridHead', + grid_points=9, + num_convs=8, + in_channels=256, + point_feat_channels=64, + norm_cfg=dict(type='GN', num_groups=36), + loss_grid=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=15))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_radius=1, + pos_weight=-1, + max_num_grid=192, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.03, + nms=dict(type='nms', iou_threshold=0.3), + max_per_img=100))) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/configs/mmdet/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py b/configs/mmdet/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 00000000..3bc8516e --- /dev/null +++ b/configs/mmdet/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,24 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, 
+ groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/configs/mmdet/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py b/configs/mmdet/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 00000000..c78f8f65 --- /dev/null +++ b/configs/mmdet/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/grid_rcnn/metafile.yml b/configs/mmdet/grid_rcnn/metafile.yml new file mode 100644 index 00000000..d1aa8513 --- /dev/null +++ b/configs/mmdet/grid_rcnn/metafile.yml @@ -0,0 +1,101 @@ +Collections: + - Name: Grid R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - Dilated Convolution + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1906.05688 + Title: 'Grid R-CNN' + README: configs/grid_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/grid_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: grid_rcnn_r50_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 
5.1 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth + + - Name: grid_rcnn_r101_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 79.37 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth + + - Name: grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 8.3 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth + + - Name: grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 11.3 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth diff --git a/configs/mmdet/groie/README.md b/configs/mmdet/groie/README.md new file mode 100644 index 00000000..989a2ed8 --- /dev/null +++ b/configs/mmdet/groie/README.md @@ -0,0 +1,72 @@ +# GRoIE + +> [A novel Region of Interest Extraction Layer for Instance Segmentation](https://arxiv.org/abs/2004.13665) + + + +## Abstract + +Given the wide diffusion of deep neural network architectures for computer vision tasks, several new applications are nowadays more and more feasible. Among them, a particular attention has been recently given to instance segmentation, by exploiting the results achievable by two-stage networks (such as Mask R-CNN or Faster R-CNN), derived from R-CNN. In these complex architectures, a crucial role is played by the Region of Interest (RoI) extraction layer, devoted to extracting a coherent subset of features from a single Feature Pyramid Network (FPN) layer attached on top of a backbone. +This paper is motivated by the need to overcome the limitations of existing RoI extractors which select only one (the best) layer from FPN. Our intuition is that all the layers of FPN retain useful information. Therefore, the proposed layer (called Generic RoI Extractor - GRoIE) introduces non-local building blocks and attention mechanisms to boost the performance. +A comprehensive ablation study at component level is conducted to find the best set of algorithms and parameters for the GRoIE layer. Moreover, GRoIE can be integrated seamlessly with every two-stage architecture for both object detection and instance segmentation tasks. Therefore, the improvements brought about by the use of GRoIE in different state-of-the-art architectures are also evaluated. The proposed layer leads up to gain a 1.1% AP improvement on bounding box detection and 1.7% AP improvement on instance segmentation. + +
+ +
+ +## Introduction + +By Leonardo Rossi, Akbar Karimi and Andrea Prati from +[IMPLab](http://implab.ce.unipr.it/). + +We provide configs to reproduce the results in the paper for +"*A novel Region of Interest Extraction Layer for Instance Segmentation*" +on COCO object detection. + +This paper is motivated by the need to overcome the limitations of existing +RoI extractors which select only one (the best) layer from FPN. + +Our intuition is that all the layers of FPN retain useful information. + +Therefore, the proposed layer (called Generic RoI Extractor - **GRoIE**) +introduces non-local building blocks and attention mechanisms to boost the +performance. + +## Results and Models + +The results on COCO 2017 minival (5k images) are shown in the below table. + +### Application of GRoIE to different architectures + +| Backbone | Method | Lr schd | box AP | mask AP | Config | Download| +| :-------: | :--------------: | :-----: | :----: | :-----: | :-------:| :--------:| +| R-50-FPN | Faster Original | 1x | 37.4 | | [config](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | + GRoIE | 1x | 38.3 | | [config](./faster_rcnn_r50_fpn_groie_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | Grid R-CNN | 1x | 39.1 | | [config](./grid_rcnn_r50_fpn_gn-head_1x_coco.py)|
[model](https://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059-64f00ee8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059.log.json) | +| R-50-FPN | + GRoIE | 1x | | | [config](./grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py)|| +| R-50-FPN | Mask R-CNN | 1x | 38.2 | 34.7 | [config](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN | + GRoIE | 1x | 39.0 | 36.0 | [config](./mask_rcnn_r50_fpn_groie_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | GC-Net | 1x | 40.7 | 36.5 | [config](../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-50-FPN | + GRoIE | 1x | 41.0 | 37.8 | [config](./mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py) 
|[model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | +| R-101-FPN | GC-Net | 1x | 42.2 | 37.8 | [config](../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| R-101-FPN | + GRoIE | 1x | 42.6 | 38.7 | [config](./mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507.log.json) | + +## Citation + +If you use this work or benchmark in your research, please cite this project. 
+ +```latex +@inproceedings{rossi2021novel, + title={A novel region of interest extraction layer for instance segmentation}, + author={Rossi, Leonardo and Karimi, Akbar and Prati, Andrea}, + booktitle={2020 25th International Conference on Pattern Recognition (ICPR)}, + pages={2203--2209}, + year={2021}, + organization={IEEE} +} +``` + +## Contact + +The implementation of GRoIE is currently maintained by +[Leonardo Rossi](https://github.com/hachreak/). diff --git a/configs/mmdet/groie/faster_rcnn_r50_fpn_groie_1x_coco.py b/configs/mmdet/groie/faster_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 00000000..0fc528bf --- /dev/null +++ b/configs/mmdet/groie/faster_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,25 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/configs/mmdet/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py b/configs/mmdet/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py new file mode 100644 index 00000000..8e4b4ab2 --- /dev/null +++ b/configs/mmdet/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, 
+ padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + grid_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/configs/mmdet/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/configs/mmdet/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 00000000..8b837221 --- /dev/null +++ b/configs/mmdet/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + 
attention_type='0100', + kv_stride=2)))) diff --git a/configs/mmdet/groie/mask_rcnn_r50_fpn_groie_1x_coco.py b/configs/mmdet/groie/mask_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 00000000..81dfb487 --- /dev/null +++ b/configs/mmdet/groie/mask_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/configs/mmdet/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/configs/mmdet/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 00000000..852c5ca7 --- /dev/null +++ b/configs/mmdet/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 
8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/configs/mmdet/groie/metafile.yml b/configs/mmdet/groie/metafile.yml new file mode 100644 index 00000000..269cb393 --- /dev/null +++ b/configs/mmdet/groie/metafile.yml @@ -0,0 +1,93 @@ +Collections: + - Name: GRoIE + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Generic RoI Extractor + - FPN + - RPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/2004.13665 + Title: 'A novel Region of Interest Extraction Layer for Instance Segmentation' + README: configs/groie/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/roi_heads/roi_extractors/groie.py#L15 + Version: v2.1.0 + +Models: + - Name: faster_rcnn_r50_fpn_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth + + - Name: grid_rcnn_r50_fpn_gn-head_groie_1x_coco + In Collection: GRoIE + Config: 
configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + + - Name: mask_rcnn_r50_fpn_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth diff --git a/configs/mmdet/guided_anchoring/README.md b/configs/mmdet/guided_anchoring/README.md new file mode 100644 index 00000000..b42de99b --- /dev/null +++ b/configs/mmdet/guided_anchoring/README.md @@ 
-0,0 +1,59 @@ +# Guided Anchoring + +> [Region Proposal by Guided Anchoring](https://arxiv.org/abs/1901.03278) + + + +## Abstract + +Region anchors are the cornerstone of modern object detection techniques. State-of-the-art detectors mostly rely on a dense anchoring scheme, where anchors are sampled uniformly over the spatial domain with a predefined set of scales and aspect ratios. In this paper, we revisit this foundational stage. Our study shows that it can be done much more effectively and efficiently. Specifically, we present an alternative scheme, named Guided Anchoring, which leverages semantic features to guide the anchoring. The proposed method jointly predicts the locations where the center of objects of interest are likely to exist as well as the scales and aspect ratios at different locations. On top of predicted anchor shapes, we mitigate the feature inconsistency with a feature adaption module. We also study the use of high-quality proposals to improve detection performance. The anchoring scheme can be seamlessly integrated into proposal methods and detectors. With Guided Anchoring, we achieve 9.1% higher recall on MS COCO with 90% fewer anchors than the RPN baseline. We also adopt Guided Anchoring in Fast R-CNN, Faster R-CNN and RetinaNet, respectively improving the detection mAP by 2.2%, 2.7% and 1.2%. + +
+ +
+ +## Results and Models + +The results on COCO 2017 val are shown in the below table. (results on test-dev are usually slightly higher than val). + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR 1000 | Config | Download | +| :----: | :-------------: | :-----: | :-----: | :------: | :------------: | :-----: | :------: | :--------: | +| GA-RPN | R-50-FPN | caffe | 1x | 5.3 | 15.8 | 68.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531_011819.log.json) | +| GA-RPN | R-101-FPN | caffe | 1x | 7.3 | 13.0 | 69.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531_011812.log.json) | +| GA-RPN | X-101-32x4d-FPN | pytorch | 1x | 8.5 | 10.0 | 70.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220_221326.log.json) | +| GA-RPN | X-101-64x4d-FPN | pytorch | 1x | 7.1 | 7.5 | 71.2 |
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225_152704.log.json) | + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------------: | :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| GA-Faster RCNN | R-50-FPN | caffe | 1x | 5.5 | | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718.log.json) | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | 7.5 | | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_20200505_115528.log.json) | +| GA-Faster RCNN | X-101-32x4d-FPN | pytorch | 1x | 8.7 | 9.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215_184547.log.json) | +| GA-Faster RCNN | X-101-64x4d-FPN | pytorch | 1x | 11.8 | 7.3 | 43.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215_104455.log.json) | +| GA-RetinaNet | R-50-FPN | caffe | 1x | 3.5 | 16.8 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020_225450.log.json) | +| GA-RetinaNet | R-101-FPN | caffe | 1x | 5.5 | 12.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531_012847.log.json) | +| GA-RetinaNet | X-101-32x4d-FPN | pytorch | 1x | 6.9 | 10.6 | 40.5 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219_223025.log.json) | +| GA-RetinaNet | X-101-64x4d-FPN | pytorch | 1x | 9.9 | 7.7 | 41.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226_221123.log.json) | + +- In the Guided Anchoring paper, `score_thr` is set to 0.001 in Fast/Faster RCNN and 0.05 in RetinaNet for both baselines and Guided Anchoring. + +- Performance on COCO test-dev benchmark is shown as follows. + +| Method | Backbone | Style | Lr schd | Aug Train | Score thr | AP | AP_50 | AP_75 | AP_small | AP_medium | AP_large | Download | +| :------------: | :-------: | :---: | :-----: | :-------: | :-------: | :---: | :---: | :---: | :------: | :-------: | :------: | :------: | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.001 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 2x | T | 0.05 | | | | | | | | + +## Citation + +We provide config files to reproduce the results in the CVPR 2019 paper for [Region Proposal by Guided Anchoring](https://arxiv.org/abs/1901.03278).
+ +```latex +@inproceedings{wang2019region, + title={Region Proposal by Guided Anchoring}, + author={Jiaqi Wang and Kai Chen and Shuo Yang and Chen Change Loy and Dahua Lin}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` diff --git a/configs/mmdet/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..8fc203c6 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(num=256))), + test_cfg=dict(rcnn=dict(score_thr=1e-3))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + 
flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..a40e7c6f --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ga_faster_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..b0add92c --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + 
target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(nms_post=1000, max_per_img=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(nms_post=1000, max_per_img=300), rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_faster_r50_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_faster_r50_fpn_1x_coco.py new file mode 100644 index 00000000..e3d82389 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_faster_r50_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + 
square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(nms_post=1000, max_per_img=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(nms_post=1000, max_per_img=300), rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..f1dda949 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + 
out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..fb9e2afc --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..1b1cccd0 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ga_retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py b/configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py new file mode 100644 index 00000000..260895b4 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py @@ -0,0 +1,169 @@ +_base_ = '../_base_/default_runtime.py' + +# model settings +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + 
checkpoint='open-mmlab://detectron2/resnet101_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5), + bbox_head=dict( + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + center_ratio=0.2, + ignore_ratio=0.5, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 
1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + keep_ratio=True, + multiscale_mode='range'), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[16, 22]) +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git 
a/configs/mmdet/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..33512011 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py 
b/configs/mmdet/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 00000000..76947235 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..c5eb34f5 --- /dev/null 
+++ b/configs/mmdet/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..5c69a6f8 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..039703ec --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = './ga_rpn_r50_caffe_fpn_1x_coco.py' +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..7830894a --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + 
type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)), + test_cfg=dict(rpn=dict(nms_post=1000))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..27ab3e73 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = '../rpn/rpn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + 
strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)), + test_cfg=dict(rpn=dict(nms_post=1000))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..cccc985f --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py 
b/configs/mmdet/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..4e134d23 --- /dev/null +++ b/configs/mmdet/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/guided_anchoring/metafile.yml b/configs/mmdet/guided_anchoring/metafile.yml new file mode 100644 index 00000000..3019d4a1 --- /dev/null +++ b/configs/mmdet/guided_anchoring/metafile.yml @@ -0,0 +1,246 @@ +Collections: + - Name: Guided Anchoring + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Guided Anchoring + - ResNet + Paper: + URL: https://arxiv.org/abs/1901.03278 + Title: 'Region Proposal by Guided Anchoring' + README: configs/guided_anchoring/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/dense_heads/ga_retina_head.py#L10 + Version: v2.0.0 + +Models: + - Name: ga_rpn_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.3 + inference time (ms/im): + - value: 63.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 68.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth + + - Name: ga_rpn_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: 
configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 76.92 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 69.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth + + - Name: ga_rpn_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.5 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 70.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth + + - Name: ga_rpn_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 133.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 70.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth + + - Name: ga_faster_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.6 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth + + - Name: ga_faster_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth + + - Name: ga_faster_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.7 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth + + - Name: ga_faster_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 11.8 + inference time (ms/im): + - value: 136.99 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth + + - Name: ga_retinanet_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.5 + inference 
time (ms/im): + - value: 59.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth + + - Name: ga_retinanet_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth + + - Name: ga_retinanet_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.9 + inference time (ms/im): + - value: 94.34 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth + + - Name: ga_retinanet_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + 
Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth diff --git a/configs/mmdet/hrnet/README.md b/configs/mmdet/hrnet/README.md new file mode 100644 index 00000000..f1a9d964 --- /dev/null +++ b/configs/mmdet/hrnet/README.md @@ -0,0 +1,101 @@ +# HRNet + +> [Deep High-Resolution Representation Learning for Human Pose Estimation](https://arxiv.org/abs/1902.09212) + + + +## Abstract + +This is an official pytorch implementation of Deep High-Resolution Representation Learning for Human Pose Estimation. In this work, we are interested in the human pose estimation problem with a focus on learning reliable high-resolution representations. Most existing methods recover high-resolution representations from low-resolution representations produced by a high-to-low resolution network. Instead, our proposed network maintains high-resolution representations through the whole process. We start from a high-resolution subnetwork as the first stage, gradually add high-to-low resolution subnetworks one by one to form more stages, and connect the mutli-resolution subnetworks in parallel. We conduct repeated multi-scale fusions such that each of the high-to-low resolution representations receives information from other parallel representations over and over, leading to rich high-resolution representations. As a result, the predicted keypoint heatmap is potentially more accurate and spatially more precise. We empirically demonstrate the effectiveness of our network through the superior pose estimation results over two benchmark datasets: the COCO keypoint detection dataset and the MPII Human Pose dataset. + +High-resolution representation learning plays an essential role in many vision problems, e.g., pose estimation and semantic segmentation. 
The high-resolution network (HRNet), recently developed for human pose estimation, maintains high-resolution representations through the whole process by connecting high-to-low resolution convolutions in parallel and produces strong high-resolution representations by repeatedly conducting fusions across parallel convolutions. +In this paper, we conduct a further study on high-resolution representations by introducing a simple yet effective modification and apply it to a wide range of vision tasks. We augment the high-resolution representation by aggregating the (upsampled) representations from all the parallel convolutions rather than only the representation from the high-resolution convolution as done in HRNet. This simple modification leads to stronger representations, evidenced by superior results. We show top results in semantic segmentation on Cityscapes, LIP, and PASCAL Context, and facial landmark detection on AFLW, COFW, 300W, and WFLW. In addition, we build a multi-level representation from the high-resolution representation and apply it to the Faster R-CNN object detection framework and the extended frameworks. The proposed approach achieves superior results to existing single-model networks on COCO object detection. + +
+ +
+ +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:| :--------:| +| HRNetV2p-W18 | pytorch | 1x | 6.6 | 13.4 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130_211246.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 6.6 | - | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.0 | 12.4 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130_204442.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.0 | - | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.4 | 10.5 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210_125315.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.4 | - | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 1x | 7.0 | 11.7 | 37.7 | 34.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205_232523.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 7.0 | - | 39.8 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212_134222.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.4 | 11.3 | 41.2 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207_055017.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.4 | - | 42.5 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213_150518.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.9 | | 42.1 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.9 | | 42.8 | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------: | :--------: | +| HRNetV2p-W18 | pytorch | 20e | 7.0 | 11.0 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210_105632.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 9.4 | 11.0 | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208_160511.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 10.8 | | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112.log.json) | + +### Cascade 
Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 8.5 | 8.5 |41.6 |36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210_093149.log.json) | +| HRNetV2p-W32 | pytorch | 20e | | 8.3 |44.3 |38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 12.5 | |45.1 |39.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922.log.json) | + +### Hybrid Task Cascade (HTC) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| 
:------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 10.8 | 4.7 | 42.8 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210_182735.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 13.1 | 4.9 | 45.4 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207_193153.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 14.6 | | 46.4 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411.log.json) | + +### FCOS + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:------:|:------:|:------:|:------:|:--------:| +|HRNetV2p-W18| pytorch | Y | N | 1x | 13.0 | 12.9 | 35.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710-4ad151de.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710.log.json) | +|HRNetV2p-W18| pytorch | Y | N | 2x | 13.0 | - | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110-5c575fa5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 1x | 17.5 | 12.9 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730-cb8055c0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 2x | 17.5 | - | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133-77b6b9bb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133.log.json) | +|HRNetV2p-W18| pytorch | Y | Y | 2x | 13.0 | 12.9 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651-441e9d9f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651.log.json) | +|HRNetV2p-W32| pytorch | Y | Y | 2x | 17.5 | 12.4 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846-b6f2b49f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846.log.json) | +|HRNetV2p-W40| pytorch | Y | Y | 2x | 20.3 | 10.8 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752-f22d2ce5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752.log.json) | + +**Note:** + +- The `28e` schedule in HTC indicates decreasing the lr at 24 and 27 epochs, with a total of 28 epochs. +- HRNetV2 ImageNet pretrained models are in [HRNets for Image Classification](https://github.com/HRNet/HRNet-Image-Classification). 
+ +## Citation + +```latex +@inproceedings{SunXLW19, + title={Deep High-Resolution Representation Learning for Human Pose Estimation}, + author={Ke Sun and Bin Xiao and Dong Liu and Jingdong Wang}, + booktitle={CVPR}, + year={2019} +} + +@article{SunZJCXLMWLW19, + title={High-Resolution Representations for Labeling Pixels and Regions}, + author={Ke Sun and Yang Zhao and Borui Jiang and Tianheng Cheng and Bin Xiao + and Dong Liu and Yadong Mu and Xinggang Wang and Wenyu Liu and Jingdong Wang}, + journal = {CoRR}, + volume = {abs/1904.04514}, + year={2019} +} +``` diff --git a/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py b/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 00000000..839cf3eb --- /dev/null +++ b/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py b/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 00000000..99426027 --- /dev/null +++ b/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,40 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + 
num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py b/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 00000000..10d5e83c --- /dev/null +++ b/configs/mmdet/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,12 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py b/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 00000000..ebd5e202 --- /dev/null +++ b/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py b/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py 
new file mode 100644 index 00000000..e7f89a9e --- /dev/null +++ b/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,40 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py b/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 00000000..265e8d63 --- /dev/null +++ b/configs/mmdet/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,12 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 00000000..1df2c3db --- /dev/null +++ b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py 
@@ -0,0 +1,11 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 00000000..a4b987a1 --- /dev/null +++ b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './faster_rcnn_hrnetv2p_w18_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 00000000..be058099 --- /dev/null +++ b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py 
b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 00000000..63c87171 --- /dev/null +++ b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 00000000..886a7c90 --- /dev/null +++ b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 00000000..585cc2c3 --- /dev/null +++ b/configs/mmdet/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py b/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py new file mode 100644 index 00000000..fd662bd1 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', 
checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py b/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py new file mode 100644 index 00000000..34975959 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py b/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 00000000..37bfdae9 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py b/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py new file mode 100644 index 00000000..10617f24 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py @@ -0,0 +1,70 @@ +_base_ = '../fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, 
+ block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256, + stride=2, + num_outs=5)) +img_norm_cfg = dict( + mean=[103.53, 116.28, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py b/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py new file mode 100644 index 00000000..7b381307 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py 
b/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 00000000..482f8872 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +img_norm_cfg = dict( + mean=[103.53, 116.28, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py b/configs/mmdet/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 00000000..0ae9dbe3 --- /dev/null +++ b/configs/mmdet/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,11 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + 
stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/configs/mmdet/hrnet/htc_hrnetv2p_w18_20e_coco.py b/configs/mmdet/hrnet/htc_hrnetv2p_w18_20e_coco.py new file mode 100644 index 00000000..3c2eb1dd --- /dev/null +++ b/configs/mmdet/hrnet/htc_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/configs/mmdet/hrnet/htc_hrnetv2p_w32_20e_coco.py b/configs/mmdet/hrnet/htc_hrnetv2p_w32_20e_coco.py new file mode 100644 index 00000000..545cb83e --- /dev/null +++ b/configs/mmdet/hrnet/htc_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,37 @@ +_base_ = '../htc/htc_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/configs/mmdet/hrnet/htc_hrnetv2p_w40_20e_coco.py b/configs/mmdet/hrnet/htc_hrnetv2p_w40_20e_coco.py new file mode 100644 
index 00000000..94bff1bc --- /dev/null +++ b/configs/mmdet/hrnet/htc_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/configs/mmdet/hrnet/htc_hrnetv2p_w40_28e_coco.py b/configs/mmdet/hrnet/htc_hrnetv2p_w40_28e_coco.py new file mode 100644 index 00000000..7067e8b6 --- /dev/null +++ b/configs/mmdet/hrnet/htc_hrnetv2p_w40_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_hrnetv2p_w40_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +runner = dict(type='EpochBasedRunner', max_epochs=28) diff --git a/configs/mmdet/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py b/configs/mmdet/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py new file mode 100644 index 00000000..815f2857 --- /dev/null +++ b/configs/mmdet/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = '../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +runner = dict(type='EpochBasedRunner', max_epochs=28) diff --git a/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 00000000..cb12200e --- /dev/null +++ b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git 
a/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 00000000..ca62682a --- /dev/null +++ b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 00000000..d5f0eb56 --- /dev/null +++ b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 00000000..63d5d139 --- /dev/null +++ b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py 
b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 00000000..5a76f4b0 --- /dev/null +++ b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 00000000..3a2a5106 --- /dev/null +++ b/configs/mmdet/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/hrnet/metafile.yml b/configs/mmdet/hrnet/metafile.yml new file mode 100644 index 00000000..ac36efa9 --- /dev/null +++ b/configs/mmdet/hrnet/metafile.yml @@ -0,0 +1,971 @@ +Models: + - Name: faster_rcnn_hrnetv2p_w18_1x_coco + In Collection: Faster R-CNN + Config: configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py + Metadata: + Training Memory (GB): 6.6 + inference time (ms/im): + - value: 74.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution 
Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: faster_rcnn_hrnetv2p_w18_2x_coco + In Collection: Faster R-CNN + Config: configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py + Metadata: + Training Memory (GB): 6.6 + inference time (ms/im): + - value: 74.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: faster_rcnn_hrnetv2p_w32_1x_coco + In Collection: Faster R-CNN + Config: configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation 
Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: faster_rcnn_hrnetv2p_w32_2x_coco + In Collection: Faster R-CNN + Config: configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: faster_rcnn_hrnetv2p_w40_1x_coco + In Collection: Faster R-CNN + Config: configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py + Metadata: + Training Memory (GB): 10.4 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual 
Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: faster_rcnn_hrnetv2p_w40_2x_coco + In Collection: Faster R-CNN + Config: configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py + Metadata: + Training Memory (GB): 10.4 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: mask_rcnn_hrnetv2p_w18_1x_coco + In Collection: Mask R-CNN + Config: configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep 
High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: mask_rcnn_hrnetv2p_w18_2x_coco + In Collection: Mask R-CNN + Config: configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: mask_rcnn_hrnetv2p_w32_1x_coco + In Collection: Mask R-CNN + Config: configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: mask_rcnn_hrnetv2p_w32_2x_coco + In Collection: Mask R-CNN + Config: configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: mask_rcnn_hrnetv2p_w40_1x_coco + In Collection: Mask R-CNN + Config: configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py + Metadata: + Training Memory (GB): 10.9 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: mask_rcnn_hrnetv2p_w40_2x_coco + In Collection: Mask R-CNN + Config: configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py + Metadata: + Training Memory (GB): 10.9 + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: cascade_rcnn_hrnetv2p_w18_20e_coco + In Collection: Cascade R-CNN + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: cascade_rcnn_hrnetv2p_w32_20e_coco + In Collection: Cascade R-CNN + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: cascade_rcnn_hrnetv2p_w40_20e_coco + In Collection: Cascade R-CNN + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 10.8 + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.8 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: cascade_mask_rcnn_hrnetv2p_w18_20e_coco + In Collection: Cascade R-CNN + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 8.5 + inference time (ms/im): + - value: 117.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: cascade_mask_rcnn_hrnetv2p_w32_20e_coco + In Collection: Cascade R-CNN + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py + Metadata: + inference time (ms/im): + - value: 120.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - 
Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: cascade_mask_rcnn_hrnetv2p_w40_20e_coco + In Collection: Cascade R-CNN + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 12.5 + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: htc_hrnetv2p_w18_20e_coco + In Collection: HTC + Config: configs/hrnet/htc_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 212.77 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: 
+ - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: htc_hrnetv2p_w32_20e_coco + In Collection: HTC + Config: configs/hrnet/htc_hrnetv2p_w32_20e_coco.py + Metadata: + Training Memory (GB): 13.1 + inference time (ms/im): + - value: 204.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: htc_hrnetv2p_w40_20e_coco + In Collection: HTC + Config: configs/hrnet/htc_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 14.6 + Epochs: 20 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box 
AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w18_gn-head_4x4_1x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710-4ad151de.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w18_gn-head_4x4_2x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay 
+ Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110-5c575fa5.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w32_gn-head_4x4_1x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730-cb8055c0.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w32_gn-head_4x4_2x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training 
Techniques: + - SGD with Momentum + - Weight Decay + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133-77b6b9bb.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651-441e9d9f.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 
80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846-b6f2b49f.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 + + - Name: fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FCOS + Config: configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 20.3 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Architecture: + - HRNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752-f22d2ce5.pth + Paper: + URL: https://arxiv.org/abs/1904.04514 + Title: 'Deep High-Resolution Representation Learning for Visual Recognition' + README: configs/hrnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/backbones/hrnet.py#L195 + Version: v2.0.0 diff --git a/configs/mmdet/htc/README.md b/configs/mmdet/htc/README.md new file mode 100644 index 00000000..c57a5d18 --- 
> [Hybrid Task Cascade for Instance Segmentation](https://arxiv.org/abs/1901.07518)
+ +
+ +## Introduction + +HTC requires COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) dataset for training. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +| | ├── stuffthingmaps +``` + +## Results and Models + +The results on COCO 2017val are shown in the below table. (results on test-dev are usually slightly higher than val) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 8.2 | 5.8 | 42.3 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317_070435.log.json) | +| R-50-FPN | pytorch | 20e | 8.2 | - | 43.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319_070313.log.json) | +| R-101-FPN | pytorch | 20e | 10.2 | 5.5 | 44.8 | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317_153107.log.json) | +| X-101-32x4d-FPN | pytorch |20e| 11.4 | 5.0 | 46.1 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318_034519.log.json) | +| X-101-64x4d-FPN | pytorch |20e| 14.5 | 4.4 | 47.0 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318_081711.log.json) | + +- In the HTC paper and COCO 2018 Challenge, `score_thr` is set to 0.001 for both baselines and HTC. +- We use 8 GPUs with 2 images/GPU for R-50 and R-101 models, and 16 GPUs with 1 image/GPU for X-101 models. + If you would like to train X-101 HTC with 8 GPUs, you need to change the lr from 0.02 to 0.01. + +We also provide a powerful HTC with DCN and multi-scale training model. No testing augmentation is used. 
+ +| Backbone | Style | DCN | training scales | Lr schd | box AP | mask AP | Config | Download | +|:----------------:|:-------:|:-----:|:---------------:|:-------:|:------:|:-------:|:------:|:--------:| +| X-101-64x4d-FPN | pytorch | c3-c5 | 400~1400 | 20e | 50.4 | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312_203410.log.json) | + +## Citation + +We provide config files to reproduce the results in the CVPR 2019 paper for [Hybrid Task Cascade](https://arxiv.org/abs/1901.07518). + +```latex +@inproceedings{chen2019hybrid, + title={Hybrid task cascade for instance segmentation}, + author={Chen, Kai and Pang, Jiangmiao and Wang, Jiaqi and Xiong, Yu and Li, Xiaoxiao and Sun, Shuyang and Feng, Wansen and Liu, Ziwei and Shi, Jianping and Ouyang, Wanli and Chen Change Loy and Dahua Lin}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` diff --git a/configs/mmdet/htc/htc_r101_fpn_20e_coco.py b/configs/mmdet/htc/htc_r101_fpn_20e_coco.py new file mode 100644 index 00000000..b42297bf --- /dev/null +++ b/configs/mmdet/htc/htc_r101_fpn_20e_coco.py @@ -0,0 +1,9 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/htc/htc_r50_fpn_1x_coco.py b/configs/mmdet/htc/htc_r50_fpn_1x_coco.py new file mode 
100644 index 00000000..1e8e18a0 --- /dev/null +++ b/configs/mmdet/htc/htc_r50_fpn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = './htc_without_semantic_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='FusedSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + loss_seg=dict( + type='CrossEntropyLoss', ignore_index=255, loss_weight=0.2)))) +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict( + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/htc/htc_r50_fpn_20e_coco.py b/configs/mmdet/htc/htc_r50_fpn_20e_coco.py new file mode 100644 index 00000000..7d2e0116 --- /dev/null +++ 
b/configs/mmdet/htc/htc_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/htc/htc_without_semantic_r50_fpn_1x_coco.py b/configs/mmdet/htc/htc_without_semantic_r50_fpn_1x_coco.py new file mode 100644 index 00000000..565104f4 --- /dev/null +++ b/configs/mmdet/htc/htc_without_semantic_r50_fpn_1x_coco.py @@ -0,0 +1,236 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='HybridTaskCascade', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='HybridTaskCascadeRoIHead', + interleaved=True, + mask_info_flow=True, + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + 
type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=[ + dict( + type='HTCMaskHead', + with_conv_res=False, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + 
type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.001, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + 
dict(type='Collect', keys=['img']), + ]) +] +data = dict( + val=dict(pipeline=test_pipeline), test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py b/configs/mmdet/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py new file mode 100644 index 00000000..0c834f28 --- /dev/null +++ b/configs/mmdet/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,19 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py b/configs/mmdet/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py new file mode 100644 index 00000000..8b0d962b --- /dev/null +++ b/configs/mmdet/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,19 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py b/configs/mmdet/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py new file mode 100644 index 00000000..c8d87033 --- /dev/null +++ 
b/configs/mmdet/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py @@ -0,0 +1,43 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +data = dict( + samples_per_gpu=1, workers_per_gpu=1, train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/configs/mmdet/htc/metafile.yml b/configs/mmdet/htc/metafile.yml new file mode 100644 index 00000000..acd038c7 --- /dev/null +++ b/configs/mmdet/htc/metafile.yml @@ -0,0 +1,165 @@ +Collections: + - Name: HTC + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - HTC + - RPN + - ResNet + - ResNeXt + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1901.07518 + Title: 'Hybrid Task Cascade for Instance Segmentation' + README: 
configs/htc/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/htc.py#L6 + Version: v2.0.0 + +Models: + - Name: htc_r50_fpn_1x_coco + In Collection: HTC + Config: configs/htc/htc_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.2 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth + + - Name: htc_r50_fpn_20e_coco + In Collection: HTC + Config: configs/htc/htc_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 8.2 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth + + - Name: htc_r101_fpn_20e_coco + In Collection: HTC + Config: configs/htc/htc_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 10.2 + inference time (ms/im): + - value: 181.82 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth + + - Name: htc_x101_32x4d_fpn_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py + Metadata: + 
Training Resources: 16x V100 GPUs + Batch Size: 16 + Training Memory (GB): 11.4 + inference time (ms/im): + - value: 200 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth + + - Name: htc_x101_64x4d_fpn_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Training Memory (GB): 14.5 + inference time (ms/im): + - value: 227.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth + + - Name: htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth diff --git a/configs/mmdet/instaboost/README.md b/configs/mmdet/instaboost/README.md new file mode 100644 index 00000000..083a9e7b --- /dev/null +++ 
b/configs/mmdet/instaboost/README.md @@ -0,0 +1,58 @@ +# Instaboost + +> [Instaboost: Boosting instance segmentation via probability map guided copy-pasting](https://arxiv.org/abs/1908.07801) + + + +## Abstract + +Instance segmentation requires a large number of training samples to achieve satisfactory performance and benefits from proper data augmentation. To enlarge the training set and increase the diversity, previous methods have investigated using data annotation from other domain (e.g. bbox, point) in a weakly supervised mechanism. In this paper, we present a simple, efficient and effective method to augment the training set using the existing instance mask annotations. Exploiting the pixel redundancy of the background, we are able to improve the performance of Mask R-CNN for 1.7 mAP on COCO dataset and 3.3 mAP on Pascal VOC dataset by simply introducing random jittering to objects. Furthermore, we propose a location probability map based approach to explore the feasible locations that objects can be placed based on local appearance similarity. With the guidance of such map, we boost the performance of R101-Mask R-CNN on instance segmentation from 35.7 mAP to 37.9 mAP without modifying the backbone or network structure. Our method is simple to implement and does not increase the computational complexity. It can be integrated into the training pipeline of any instance segmentation model without affecting the training and inference efficiency. + +
+ +
+ +## Introduction + +Configs in this directory are the implementation for ICCV2019 paper "InstaBoost: Boosting Instance Segmentation Via Probability Map Guided Copy-Pasting" and are provided by the authors of the paper. InstaBoost is a data augmentation method for object detection and instance segmentation. The paper has been released on [`arXiv`](https://arxiv.org/abs/1908.07801). + +## Usage + +### Requirements + +You need to install `instaboostfast` before using it. + +```shell +pip install instaboostfast +``` + +The code and more details can be found [here](https://github.com/GothicAi/Instaboost). + +### Integration with MMDetection + +InstaBoost has already been integrated in the data pipeline, thus all you need is to add or change **InstaBoost** configurations after **LoadImageFromFile**. We have provided examples like [this](mask_rcnn_r50_fpn_instaboost_4x_coco.py#L121). You can refer to [`InstaBoostConfig`](https://github.com/GothicAi/InstaBoost-pypi#instaboostconfig) for more details. + +## Results and Models + +- All models were trained on `coco_2017_train` and tested on `coco_2017_val` for convenience of evaluation and comparison. In the paper, the results are obtained from `test-dev`. +- To balance accuracy and training time when using InstaBoost, models released in this page are all trained for 48 Epochs. Other training and testing configs strictly follow the original framework. +- For results and models in MMDetection V1.x, please refer to [Instaboost](https://github.com/GothicAi/Instaboost). 
+ +| Network | Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :--------: | :-----: | :------: | :------------: | :------:| :-----: | :------: | :-----------------: | +| Mask R-CNN | R-50-FPN | 4x | 4.4 | 17.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223635.log.json) | +| Mask R-CNN | R-101-FPN | 4x | 6.4 | | 42.5 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738.log.json) | +| Mask R-CNN | X-101-64x4d-FPN | 4x | 10.7 | | 44.7 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947.log.json) | +| Cascade R-CNN | R-50-FPN | 4x | 6.0 | 12.0 | 43.7 | 38.0 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223646.log.json) | + +## Citation + +```latex +@inproceedings{fang2019instaboost, + title={Instaboost: Boosting instance segmentation via probability map guided copy-pasting}, + author={Fang, Hao-Shu and Sun, Jianhua and Wang, Runzhong and Gou, Minghao and Li, Yong-Lu and Lu, Cewu}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + pages={682--691}, + year={2019} +} +``` diff --git a/configs/mmdet/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py b/configs/mmdet/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 00000000..9d0515d7 --- /dev/null +++ b/configs/mmdet/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py b/configs/mmdet/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 00000000..a89a81f5 --- /dev/null +++ b/configs/mmdet/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 
'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +runner = dict(type='EpochBasedRunner', max_epochs=48) diff --git a/configs/mmdet/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/configs/mmdet/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 00000000..d67b7992 --- /dev/null +++ b/configs/mmdet/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py b/configs/mmdet/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 00000000..ebbb43e9 --- /dev/null +++ b/configs/mmdet/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py b/configs/mmdet/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 
100644 index 00000000..55ca62b7 --- /dev/null +++ b/configs/mmdet/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +runner = dict(type='EpochBasedRunner', max_epochs=48) diff --git a/configs/mmdet/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/configs/mmdet/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 00000000..2010f448 --- /dev/null +++ b/configs/mmdet/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/instaboost/metafile.yml b/configs/mmdet/instaboost/metafile.yml new file mode 100644 index 00000000..325283d3 --- /dev/null +++ b/configs/mmdet/instaboost/metafile.yml @@ -0,0 +1,99 @@ +Collections: + - Name: InstaBoost + 
Metadata: + Training Data: COCO + Training Techniques: + - InstaBoost + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Paper: + URL: https://arxiv.org/abs/1908.07801 + Title: 'Instaboost: Boosting instance segmentation via probability map guided copy-pasting' + README: configs/instaboost/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/datasets/pipelines/instaboost.py#L7 + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth + + - Name: mask_rcnn_r101_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth + + - Name: mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + - Task: Instance Segmentation + Dataset: COCO + 
Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth + + - Name: cascade_mask_rcnn_r50_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 83.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth diff --git a/configs/mmdet/lad/README.md b/configs/mmdet/lad/README.md new file mode 100644 index 00000000..a5ded4f8 --- /dev/null +++ b/configs/mmdet/lad/README.md @@ -0,0 +1,44 @@ +# LAD + +> [Improving Object Detection by Label Assignment Distillation](https://arxiv.org/abs/2108.10520) + + + +## Abstract + +Label assignment in object detection aims to assign targets, foreground or background, to sampled regions in an image. Unlike labeling for image classification, this problem is not well defined due to the object's bounding box. In this paper, we investigate the problem from a perspective of distillation, hence we call Label Assignment Distillation (LAD). Our initial motivation is very simple, we use a teacher network to generate labels for the student. This can be achieved in two ways: either using the teacher's prediction as the direct targets (soft label), or through the hard labels dynamically assigned by the teacher (LAD). Our experiments reveal that: (i) LAD is more effective than soft-label, but they are complementary. 
(ii) Using LAD, a smaller teacher can also improve a larger student significantly, while soft-label can't. We then introduce Co-learning LAD, in which two networks simultaneously learn from scratch and the role of teacher and student are dynamically interchanged. Using PAA-ResNet50 as a teacher, our LAD techniques can improve detectors PAA-ResNet101 and PAA-ResNeXt101 to 46AP and 47.5AP on the COCO test-dev set. With a stronger teacher PAA-SwinB, we improve the students PAA-ResNet50 to 43.7AP by only 1x schedule training and standard setting, and PAA-ResNet101 to 47.9AP, significantly surpassing the current methods. + +
+ +
+ +## Results and Models + +We provide config files to reproduce the object detection results in the +WACV 2022 paper for Improving Object Detection by Label Assignment +Distillation. + +### PAA with LAD + +| Teacher | Student | Training schedule | AP (val) | Config | +| :-------: | :-----: | :---------------: | :------: | :----------------------------------------------------: | +| -- | R-50 | 1x | 40.4 | | +| -- | R-101 | 1x | 42.6 | | +| R-101 | R-50 | 1x | 41.6 | [config](configs/lad/lad_r50_paa_r101_fpn_coco_1x.py) | +| R-50 | R-101 | 1x | 43.2 | [config](configs/lad/lad_r101_paa_r50_fpn_coco_1x.py) | + +## Note + +- Meaning of Config name: lad_r50(student model)_paa(based on paa)_r101(teacher model)_fpn(neck)_coco(dataset)_1x(12 epoch).py +- Results may fluctuate by about 0.2 mAP. + +## Citation + +```latex +@inproceedings{nguyen2021improving, + title={Improving Object Detection by Label Assignment Distillation}, + author={Chuong H. Nguyen and Thuy C. Nguyen and Tuan N. Tang and Nam L. H. 
Phan}, + booktitle = {WACV}, + year={2022} +} +``` diff --git a/configs/mmdet/lad/lad_r101_paa_r50_fpn_coco_1x.py b/configs/mmdet/lad/lad_r101_paa_r50_fpn_coco_1x.py new file mode 100644 index 00000000..4877d95b --- /dev/null +++ b/configs/mmdet/lad/lad_r101_paa_r50_fpn_coco_1x.py @@ -0,0 +1,126 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth' # noqa +model = dict( + type='LAD', + # student + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # teacher + teacher_ckpt=teacher_ckpt, + teacher_backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + teacher_neck=dict( + type='FPN', + 
in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + teacher_bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + score_voting=True, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +data = dict(samples_per_gpu=8, workers_per_gpu=4) +optimizer = dict(lr=0.01) +fp16 = dict(loss_scale=512.) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/lad/lad_r50_paa_r101_fpn_coco_1x.py b/configs/mmdet/lad/lad_r50_paa_r101_fpn_coco_1x.py new file mode 100644 index 00000000..29bbe693 --- /dev/null +++ b/configs/mmdet/lad/lad_r50_paa_r101_fpn_coco_1x.py @@ -0,0 +1,125 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'http://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth' # noqa +model = dict( + type='LAD', + # student + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # teacher + teacher_ckpt=teacher_ckpt, + teacher_backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + 
teacher_neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + teacher_bbox_head=dict( + type='LADHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + score_voting=True, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +data = dict(samples_per_gpu=8, workers_per_gpu=4) +optimizer = dict(lr=0.01) +fp16 = dict(loss_scale=512.) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/lad/metafile.yml b/configs/mmdet/lad/metafile.yml new file mode 100644 index 00000000..5076f28d --- /dev/null +++ b/configs/mmdet/lad/metafile.yml @@ -0,0 +1,42 @@ +Collections: + - Name: Label Assignment Distillation + Metadata: + Training Data: COCO + Training Techniques: + - Label Assignment Distillation + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/2108.10520 + Title: 'Improving Object Detection by Label Assignment Distillation' + README: configs/lad/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.19.0/mmdet/models/detectors/lad.py#L10 + Version: v2.19.0 + +Models: + - Name: lad_r50_paa_r101_fpn_coco_1x + In Collection: Label Assignment Distillation + Config: configs/lad/lad_r50_paa_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + + - Name: lad_r101_paa_r50_fpn_coco_1x + In Collection: Label Assignment Distillation + Config: configs/lad/lad_r101_paa_r50_fpn_coco_1x.py + Metadata: + Teacher: R-50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.2 diff --git a/configs/mmdet/ld/README.md b/configs/mmdet/ld/README.md new file mode 100644 index 00000000..d4c2fd17 --- /dev/null +++ b/configs/mmdet/ld/README.md @@ -0,0 +1,43 @@ +# LD + +> [Localization Distillation for Dense Object Detection](https://arxiv.org/abs/2102.12252) + + + +## Abstract + +Knowledge distillation (KD) has witnessed its powerful capability in learning compact models in object detection. Previous KD methods for object detection mostly focus on imitating deep features within the imitation regions instead of mimicking classification logits due to its inefficiency in distilling localization information. 
In this paper, by reformulating the knowledge distillation process on localization, we present a novel localization distillation (LD) method which can efficiently transfer the localization knowledge from the teacher to the student. Moreover, we also heuristically introduce the concept of valuable localization region that can aid to selectively distill the semantic and localization knowledge for a certain region. Combining these two new components, for the first time, we show that logit mimicking can outperform feature imitation and localization knowledge distillation is more important and efficient than semantic knowledge for distilling object detectors. Our distillation scheme is simple as well as effective and can be easily applied to different dense object detectors. Experiments show that our LD can boost the AP score of GFocal-ResNet-50 with a single-scale 1× training schedule from 40.1 to 42.1 on the COCO benchmark without any sacrifice on the inference speed. + +
+ +
+ +## Results and Models + +### GFocalV1 with LD + +| Teacher | Student | Training schedule | Mini-batch size | AP (val) | AP50 (val) | AP75 (val) | Config | +| :-------: | :-----: | :---------------: | :-------------: | :------: | :--------: | :--------: | :--------------: | +| -- | R-18 | 1x | 6 | 35.8 | 53.1 | 38.2 | | +| R-101 | R-18 | 1x | 6 | 36.5 | 52.9 | 39.3 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py) | +| -- | R-34 | 1x | 6 | 38.9 | 56.6 | 42.2 | | +| R-101 | R-34 | 1x | 6 | 39.8 | 56.6 | 43.1 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py) | +| -- | R-50 | 1x | 6 | 40.1 | 58.2 | 43.1 | | +| R-101 | R-50 | 1x | 6 | 41.1 | 58.7 | 44.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py) | +| -- | R-101 | 2x | 6 | 44.6 | 62.9 | 48.4 | | +| R-101-DCN | R-101 | 2x | 6 | 45.4 | 63.1 | 49.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_1x.py) | + +## Note + +- Meaning of Config name: ld_r18(student model)_gflv1(based on gflv1)_r101(teacher model)_fpn(neck)_coco(dataset)_1x(12 epoch).py + +## Citation + +```latex +@Inproceedings{zheng2022LD, + title={Localization Distillation for Dense Object Detection}, + author= {Zheng, Zhaohui and Ye, Rongguang and Wang, Ping and Ren, Dongwei and Zuo, Wangmeng and Hou, Qibin and Cheng, Mingming}, + booktitle={CVPR}, + year={2022} +} +``` diff --git a/configs/mmdet/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py b/configs/mmdet/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py new file mode 100644 index 00000000..1cbdb4cf --- /dev/null +++ b/configs/mmdet/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py @@ -0,0 +1,44 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +teacher_ckpt = 
'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth' # noqa +model = dict( + teacher_config='configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py', + teacher_ckpt=teacher_ckpt, + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) + +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/configs/mmdet/ld/ld_r18_gflv1_r101_fpn_coco_1x.py b/configs/mmdet/ld/ld_r18_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 00000000..18dce814 --- /dev/null +++ b/configs/mmdet/ld/ld_r18_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth' # noqa +model = dict( + 
type='KnowledgeDistillationSingleStageDetector', + teacher_config='configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py', + teacher_ckpt=teacher_ckpt, + backbone=dict( + type='ResNet', + depth=18, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')), + neck=dict( + type='FPN', + in_channels=[64, 128, 256, 512], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LDHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + loss_ld=dict( + type='KnowledgeDistillationKLDivLoss', loss_weight=0.25, T=10), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/ld/ld_r34_gflv1_r101_fpn_coco_1x.py b/configs/mmdet/ld/ld_r34_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 00000000..3b6996d4 --- /dev/null +++ b/configs/mmdet/ld/ld_r34_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,19 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +model = dict( + backbone=dict( + type='ResNet', + depth=34, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + 
init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet34')), + neck=dict( + type='FPN', + in_channels=[64, 128, 256, 512], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) diff --git a/configs/mmdet/ld/ld_r50_gflv1_r101_fpn_coco_1x.py b/configs/mmdet/ld/ld_r50_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 00000000..2b18785a --- /dev/null +++ b/configs/mmdet/ld/ld_r50_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,19 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) diff --git a/configs/mmdet/ld/metafile.yml b/configs/mmdet/ld/metafile.yml new file mode 100644 index 00000000..d555a6df --- /dev/null +++ b/configs/mmdet/ld/metafile.yml @@ -0,0 +1,72 @@ +Collections: + - Name: Localization Distillation + Metadata: + Training Data: COCO + Training Techniques: + - Localization Distillation + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/2102.12252 + Title: 'Localization Distillation for Dense Object Detection' + README: configs/ld/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.11.0/mmdet/models/dense_heads/ld_head.py#L11 + Version: v2.11.0 + +Models: + - Name: ld_r18_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + box AP@0.5: 52.9 + box AP@0.75: 39.3 + + - Name: ld_r34_gflv1_r101_fpn_coco_1x + In 
Collection: Localization Distillation + Config: configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + box AP@0.5: 56.6 + box AP@0.75: 43.1 + + - Name: ld_r50_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + box AP@0.5: 58.7 + box AP@0.75: 44.9 + + - Name: ld_r101_gflv1_r101dcn_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_1x.py + Metadata: + Teacher: R-101-DCN + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + box AP@0.5: 63.1 + box AP@0.75: 49.5 diff --git a/configs/mmdet/legacy_1.x/README.md b/configs/mmdet/legacy_1.x/README.md new file mode 100644 index 00000000..38a2a0e4 --- /dev/null +++ b/configs/mmdet/legacy_1.x/README.md @@ -0,0 +1,53 @@ +# Legacy Configs in MMDetection V1.x + + + +Configs in this directory implement the legacy configs used by MMDetection V1.x and its model zoos. + +To help users convert their models from V1.x to MMDetection V2.0, we provide v1.x configs to inference the converted v1.x models. +Due to the BC-breaking changes in MMDetection V2.0 from MMDetection V1.x, running inference with the same model weights in these two version will produce different results. The difference will cause within 1% AP absolute difference as can be found in the following table. + +## Usage + +To upgrade the model version, the users need to do the following steps. + +### 1. Convert model weights + +There are three main difference in the model weights between V1.x and V2.0 codebases. + +1. Since the class order in all the detector's classification branch is reordered, all the legacy model weights need to go through the conversion process. +2. 
The regression and segmentation head no longer contain the background channel. Weights in these background channels should be removed to fit the current codebase. +3. For two-stage detectors, their weights need to be upgraded since MMDetection V2.0 refactors all the two-stage detectors with `RoIHead`. + +The users can do the same modification as mentioned above for the self-implemented +detectors. We provide a script `tools/model_converters/upgrade_model_version.py` to convert the model weights in the V1.x model zoo. + +```bash +python tools/model_converters/upgrade_model_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH} --num-classes ${NUM_CLASSES} + +``` + +- OLD_MODEL_PATH: the path to load the model weights in 1.x version. +- NEW_MODEL_PATH: the path to save the converted model weights in 2.0 version. +- NUM_CLASSES: number of classes of the original model weights. Usually it is 81 for COCO dataset, 21 for VOC dataset. + The number of classes in V2.0 models should be equal to that in V1.x models - 1. + +### 2. Use configs with legacy settings + +After converting the model weights, check out the v1.2 release to find the corresponding config file that uses the legacy settings. +The V1.x models usually need these three legacy modules: `LegacyAnchorGenerator`, `LegacyDeltaXYWHBBoxCoder`, and `RoIAlign(align=False)`. +For models using ResNet Caffe backbones, they also need to change the pretrain name and the corresponding `img_norm_cfg`. +An example is in [`retinanet_r50_caffe_fpn_1x_coco_v1.py`](retinanet_r50_caffe_fpn_1x_coco_v1.py). +Then use the config to test the model weights. For most models, the obtained results should be close to that in V1.x. +We provide configs of some common structures in this directory. + +## Performance + +The performance changes after converting the models in this directory are listed as follows.
+| Method | Style | Lr schd | V1.x box AP | V1.x mask AP | V2.0 box AP | V2.0 mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------:| :-----: |:------:| :-----: | :-------: |:------------------------------------------------------------------------------------------------------------------------------: | +| Mask R-CNN R-50-FPN | pytorch | 1x | 37.3 | 34.2 | 36.8 | 33.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth)| +| RetinaNet R-50-FPN | caffe | 1x | 35.8 | - | 35.4 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_caffe_1x_coco_v1.py) | +| RetinaNet R-50-FPN | pytorch | 1x | 35.6 |-|35.2| -| [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/retinanet_r50_fpn_1x_20181125-7b0c2548.pth) | +| Cascade Mask R-CNN R-50-FPN | pytorch | 1x | 41.2 | 35.7 |40.8| 35.6| [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/cascade_mask_rcnn_r50_fpn_1x_20181123-88b170c9.pth) | +| SSD300-VGG16 | caffe | 120e | 25.7 |-|25.4|-| [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/ssd300_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/ssd300_coco_vgg16_caffe_120e_20181221-84d7110b.pth) | diff --git a/configs/mmdet/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py b/configs/mmdet/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 00000000..fc9d0048 --- /dev/null +++ 
b/configs/mmdet/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0])), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067])), + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + 
sampling_ratio=2, + aligned=False)))) +dist_params = dict(backend='nccl', port=29515) diff --git a/configs/mmdet/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py b/configs/mmdet/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 00000000..8c573bef --- /dev/null +++ b/configs/mmdet/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='FasterRCNN', + backbone=dict( + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + rpn_head=dict( + type='RPNHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn_proposal=dict(max_per_img=2000), + rcnn=dict(assigner=dict(match_low_quality=True)))) diff --git a/configs/mmdet/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py b/configs/mmdet/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 00000000..04581bbc --- /dev/null +++ b/configs/mmdet/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,34 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + rpn_head=dict( + 
anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + + # model training and testing settings + train_cfg=dict( + rpn_proposal=dict(max_per_img=2000), + rcnn=dict(assigner=dict(match_low_quality=True)))) diff --git a/configs/mmdet/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py b/configs/mmdet/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py new file mode 100644 index 00000000..a63d248c --- /dev/null +++ b/configs/mmdet/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py @@ -0,0 +1,41 @@ +_base_ = './retinanet_r50_fpn_1x_coco_v1.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + 
dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py b/configs/mmdet/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py new file mode 100644 index 00000000..6198b971 --- /dev/null +++ b/configs/mmdet/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + type='RetinaHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) diff --git a/configs/mmdet/legacy_1.x/ssd300_coco_v1.py b/configs/mmdet/legacy_1.x/ssd300_coco_v1.py new file mode 100644 index 00000000..65ccc1e5 --- /dev/null +++ b/configs/mmdet/legacy_1.x/ssd300_coco_v1.py @@ -0,0 +1,84 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +input_size = 300 +model = dict( + bbox_head=dict( + type='SSDHead', + anchor_generator=dict( + type='LegacySSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]))) +# dataset 
settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) +dist_params = dict(backend='nccl', port=29555) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/libra_rcnn/README.md b/configs/mmdet/libra_rcnn/README.md new file mode 100644 index 00000000..35446f6a --- /dev/null +++ b/configs/mmdet/libra_rcnn/README.md @@ -0,0 +1,53 @@ +# Libra R-CNN + +> [Libra R-CNN: Towards Balanced Learning for Object Detection](https://arxiv.org/abs/1904.02701) + + + +## Abstract + +Compared with model architectures, the training process, which is also crucial to the success of detectors, has received relatively less attention in object detection. In this work, we carefully revisit the standard training practice of detectors, and find that the detection performance is often limited by the imbalance during the training process, which generally consists in three levels - sample level, feature level, and objective level. To mitigate the adverse effects caused thereby, we propose Libra R-CNN, a simple but effective framework towards balanced learning for object detection. It integrates three novel components: IoU-balanced sampling, balanced feature pyramid, and balanced L1 loss, respectively for reducing the imbalance at sample, feature, and objective level. Benefitted from the overall balanced design, Libra R-CNN significantly improves the detection performance. Without bells and whistles, it achieves 2.5 points and 2.0 points higher Average Precision (AP) than FPN Faster R-CNN and RetinaNet respectively on MSCOCO. + +Instance recognition is rapidly advanced along with the developments of various deep convolutional neural networks. Compared to the architectures of networks, the training process, which is also crucial to the success of detectors, has received relatively less attention. 
In this work, we carefully revisit the standard training practice of detectors, and find that the detection performance is often limited by the imbalance during the training process, which generally consists in three levels - sample level, feature level, and objective level. To mitigate the adverse effects caused thereby, we propose Libra R-CNN, a simple yet effective framework towards balanced learning for instance recognition. It integrates IoU-balanced sampling, balanced feature pyramid, and objective re-weighting, respectively for reducing the imbalance at sample, feature, and objective level. Extensive experiments conducted on MS COCO, LVIS and Pascal VOC datasets prove the effectiveness of the overall balanced design. + +
+ +
+ +## Results and Models + +The results on COCO 2017val are shown in the below table. (results on test-dev are usually slightly higher than val) + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 1x | 4.6 | 19.0 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| Fast R-CNN | R-50-FPN | pytorch | 1x | | | | | +| Faster R-CNN | R-101-FPN | pytorch | 1x | 6.5 | 14.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203_001405.log.json) | +| Faster R-CNN | X-101-64x4d-FPN | pytorch | 1x | 10.8 | 8.5 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315_231625.log.json) | +| RetinaNet | 
R-50-FPN | pytorch | 1x | 4.2 | 17.7 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205_112757.log.json) | + +## Citation + +We provide config files to reproduce the results in the CVPR 2019 paper [Libra R-CNN](https://arxiv.org/pdf/1904.02701.pdf). + +The extended version of [Libra R-CNN](https://arxiv.org/pdf/2108.10175.pdf) is accepted by IJCV. + +```latex +@inproceedings{pang2019libra, + title={Libra R-CNN: Towards Balanced Learning for Object Detection}, + author={Pang, Jiangmiao and Chen, Kai and Shi, Jianping and Feng, Huajun and Ouyang, Wanli and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} + +@article{pang2021towards, + title={Towards Balanced Learning for Instance Recognition}, + author={Pang, Jiangmiao and Chen, Kai and Li, Qi and Xu, Zhihai and Feng, Huajun and Shi, Jianping and Ouyang, Wanli and Lin, Dahua}, + journal={International Journal of Computer Vision}, + volume={129}, + number={5}, + pages={1376--1393}, + year={2021}, + publisher={Springer} +} +``` diff --git a/configs/mmdet/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..efbedc86 --- /dev/null +++ b/configs/mmdet/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + 
roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3))))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +data = dict( + train=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_train2017.pkl'), + val=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl'), + test=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl')) diff --git a/configs/mmdet/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py b/configs/mmdet/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 00000000..e899706b --- /dev/null +++ b/configs/mmdet/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..89a0d7b2 --- /dev/null +++ b/configs/mmdet/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + 
alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict(sampler=dict(neg_pos_ub=5), allowed_border=-1), + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3))))) diff --git a/configs/mmdet/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..06740a77 --- /dev/null +++ b/configs/mmdet/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py b/configs/mmdet/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 00000000..be274209 --- /dev/null +++ b/configs/mmdet/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,26 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=1, + refine_type='non_local') + ], + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=0.11, + loss_weight=1.0))) diff --git a/configs/mmdet/libra_rcnn/metafile.yml b/configs/mmdet/libra_rcnn/metafile.yml new file mode 
100644 index 00000000..8c327959 --- /dev/null +++ b/configs/mmdet/libra_rcnn/metafile.yml @@ -0,0 +1,99 @@ +Collections: + - Name: Libra R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - IoU-Balanced Sampling + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Balanced Feature Pyramid + Paper: + URL: https://arxiv.org/abs/1904.02701 + Title: 'Libra R-CNN: Towards Balanced Learning for Object Detection' + README: configs/libra_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/necks/bfp.py#L10 + Version: v2.0.0 + +Models: + - Name: libra_faster_rcnn_r50_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + inference time (ms/im): + - value: 52.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth + + - Name: libra_faster_rcnn_r101_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + inference time (ms/im): + - value: 69.44 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth + + - Name: libra_faster_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - 
value: 117.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth + + - Name: libra_retinanet_r50_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 56.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth diff --git a/configs/mmdet/lvis/README.md b/configs/mmdet/lvis/README.md new file mode 100644 index 00000000..5c805648 --- /dev/null +++ b/configs/mmdet/lvis/README.md @@ -0,0 +1,54 @@ +# LVIS + +> [LVIS: A Dataset for Large Vocabulary Instance Segmentation](https://arxiv.org/abs/1908.03195) + + + +## Abstract + +Progress on object detection is enabled by datasets that focus the research community's attention on open challenges. This process led us from simple images to complex scenes and from bounding boxes to segmentation masks. In this work, we introduce LVIS (pronounced `el-vis'): a new dataset for Large Vocabulary Instance Segmentation. We plan to collect ~2 million high-quality instance segmentation masks for over 1000 entry-level object categories in 164k images. Due to the Zipfian distribution of categories in natural images, LVIS naturally has a long tail of categories with few training samples. 
Given that state-of-the-art deep learning methods for object detection perform poorly in the low-sample regime, we believe that our dataset poses an important and exciting new scientific challenge. + +
+ +
+ +## Common Setting + +* Please follow [install guide](../../docs/get_started.md#install-mmdetection) to install open-mmlab forked cocoapi first. +* Run following scripts to install our forked lvis-api. + + ```shell + pip install git+https://github.com/lvis-dataset/lvis-api.git + ``` + +* All experiments use oversample strategy [here](../../docs/tutorials/customize_dataset.md#class-balanced-dataset) with oversample threshold `1e-3`. +* The size of LVIS v0.5 is half of COCO, so schedule `2x` in LVIS is roughly the same iterations as `1x` in COCO. + +## Results and models of LVIS v0.5 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: |:--------: | +| R-50-FPN | pytorch | 2x | - | - | 26.1 | 25.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis-dbd06831.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_20200531_160435.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 27.1 | 27.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis-54582ee2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_20200601_134748.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 26.7 | 26.9 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis-3cf55ea2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_20200531_221749.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 26.4 | 26.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis-1c99a5ad.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_20200601_194651.log.json) | + +## Results and models of LVIS v1 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 9.1 | - | 22.5 | 21.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-aa78ac3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_061305.log.json) | +| R-101-FPN | pytorch | 1x | 10.8 | - | 24.6 | 23.6 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-ec55ce32.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_070959.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 11.8 | - | 26.7 | 25.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-ebbc5c81.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_071317.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 14.6 | - | 27.2 | 25.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-43d9edfe.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200830_060206.log.json) | + +## Citation + +```latex +@inproceedings{gupta2019lvis, + title={{LVIS}: A Dataset for Large Vocabulary Instance Segmentation}, + author={Gupta, Agrim and Dollar, Piotr and Girshick, Ross}, + booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` diff --git 
a/configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 00000000..0f017f58 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 00000000..637f4a63 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 00000000..92ddb526 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1203), mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 
800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 00000000..d53c5dc6 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v0.5_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1230), mask_head=dict(num_classes=1230)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 00000000..a6115c1a --- /dev/null +++ 
b/configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 00000000..96b62523 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 00000000..0f95a732 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py 
b/configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 00000000..986acda5 --- /dev/null +++ b/configs/mmdet/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/mask2former/README.md b/configs/mmdet/mask2former/README.md new file mode 100644 index 00000000..481e7593 --- /dev/null +++ b/configs/mmdet/mask2former/README.md @@ -0,0 +1,60 @@ +# Mask2Former + +> [Masked-attention Mask Transformer for Universal Image Segmentation](http://arxiv.org/abs/2112.01527) + + + +## Abstract + +Image segmentation is about grouping pixels with different semantics, e.g., category or instance membership, where each choice of semantics defines a task. While only the semantics of each task differ, current research focuses on designing specialized architectures for each task. We present Masked-attention Mask Transformer (Mask2Former), a new architecture capable of addressing any image segmentation task (panoptic, instance or semantic). Its key components include masked attention, which extracts localized features by constraining cross-attention within predicted mask regions. In addition to reducing the research effort by at least three times, it outperforms the best specialized architectures by a significant margin on four popular datasets. Most notably, Mask2Former sets a new state-of-the-art for panoptic segmentation (57.8 PQ on COCO), instance segmentation (50.1 AP on COCO) and semantic segmentation (57.7 mIoU on ADE20K). + +
+ +
+ +## Introduction + +Mask2Former requires COCO and [COCO-panoptic](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip) dataset for training and evaluation. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +| | | ├── instances_train2017.json +| | | ├── instances_val2017.json +│ │ │ ├── panoptic_train2017.json +│ │ │ ├── panoptic_train2017 +│ │ │ ├── panoptic_val2017.json +│ │ │ ├── panoptic_val2017 +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +``` + +## Results and Models + +| Backbone | style | Pretrain | Lr schd | Mem (GB) | Inf time (fps) | PQ | box mAP | mask mAP | Config | Download | +| :------: | :-----: | :----------: | :-----: | :------: | :------------: | :---: | :-----: | :------: | :-------------------------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| R-50 | pytorch | ImageNet-1K | 50e | 13.9 | - | 51.9 | 44.8 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_r50_lsj_8x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_r50_lsj_8x2_50e_coco/mask2former_r50_lsj_8x2_50e_coco_20220326_224516-0091ce2b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_r50_lsj_8x2_50e_coco/mask2former_r50_lsj_8x2_50e_coco_20220326_224516.log.json) | +| R-101 | pytorch | 
ImageNet-1K | 50e | 16.1 | - | 52.4 | 45.3 | 42.4 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_r101_lsj_8x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_r101_lsj_8x2_50e_coco/mask2former_r101_lsj_8x2_50e_coco_20220329_225104-bb4df090.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_r101_lsj_8x2_50e_coco/mask2former_r101_lsj_8x2_50e_coco_20220329_225104.log.json) | +| Swin-T | - | ImageNet-1K | 50e | 15.9 | - | 53.4 | 46.3 | 43.4 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco_20220326_224553-c92f921c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco_20220326_224553.log.json) | +| Swin-S | - | ImageNet-1K | 50e | 19.1 | - | 54.5 | 47.8 | 44.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco_20220329_225200-9f633bcf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco_20220329_225200.log.json) | +| Swin-B | - | ImageNet-1K | 50e | 26.0 | - | 55.1 | 48.2 | 44.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco_20220331_002244-1db756b2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco_20220331_002244.log.json) | +| Swin-B | - | ImageNet-21K | 50e | 25.8 | - | 56.3 | 50.0 | 46.3 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco_20220329_230021-89d7c1b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco_20220329_230021.log.json) | +| Swin-L | - | ImageNet-21K | 100e | 21.1 | - | 57.6 | 52.2 | 48.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco_20220407_104949-c481ee28.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco_20220407_104949.log.json) | + +## Citation + +```latex +@article{cheng2021mask2former, + title={Masked-attention Mask Transformer for Universal Image Segmentation}, + author={Bowen Cheng and Ishan Misra and Alexander G. 
Schwing and Alexander Kirillov and Rohit Girdhar}, + journal={arXiv}, + year={2021} +} +``` diff --git a/configs/mmdet/mask2former/mask2former_r101_lsj_8x2_50e_coco.py b/configs/mmdet/mask2former/mask2former_r101_lsj_8x2_50e_coco.py new file mode 100644 index 00000000..27050585 --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_r101_lsj_8x2_50e_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask2former_r50_lsj_8x2_50e_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/mask2former/mask2former_r50_lsj_8x2_50e_coco.py b/configs/mmdet/mask2former/mask2former_r50_lsj_8x2_50e_coco.py new file mode 100644 index 00000000..2c23625e --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_r50_lsj_8x2_50e_coco.py @@ -0,0 +1,253 @@ +_base_ = [ + '../_base_/datasets/coco_panoptic.py', '../_base_/default_runtime.py' +] +num_things_classes = 80 +num_stuff_classes = 53 +num_classes = num_things_classes + num_stuff_classes +model = dict( + type='Mask2Former', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + panoptic_head=dict( + type='Mask2FormerHead', + in_channels=[256, 512, 1024, 2048], # pass to pixel_decoder inside + strides=[4, 8, 16, 32], + feat_channels=256, + out_channels=256, + num_things_classes=num_things_classes, + num_stuff_classes=num_stuff_classes, + num_queries=100, + num_transformer_feat_level=3, + pixel_decoder=dict( + type='MSDeformAttnPixelDecoder', + num_outs=3, + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU'), + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', + embed_dims=256, + 
num_heads=8, + num_levels=3, + num_points=4, + im2col_step=64, + dropout=0.0, + batch_first=False, + norm_cfg=None, + init_cfg=None), + ffn_cfgs=dict( + type='FFN', + embed_dims=256, + feedforward_channels=1024, + num_fcs=2, + ffn_drop=0.0, + act_cfg=dict(type='ReLU', inplace=True)), + operation_order=('self_attn', 'norm', 'ffn', 'norm')), + init_cfg=None), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + init_cfg=None), + enforce_decoder_input_project=False, + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + transformer_decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=9, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + attn_drop=0.0, + proj_drop=0.0, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=256, + feedforward_channels=2048, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.0, + dropout_layer=None, + add_identity=True), + feedforward_channels=2048, + operation_order=('cross_attn', 'norm', 'self_attn', 'norm', + 'ffn', 'norm')), + init_cfg=None), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=2.0, + reduction='mean', + class_weight=[1.0] * num_classes + [0.1]), + loss_mask=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + reduction='mean', + loss_weight=5.0), + loss_dice=dict( + type='DiceLoss', + use_sigmoid=True, + activate=True, + reduction='mean', + naive_dice=True, + eps=1.0, + loss_weight=5.0)), + panoptic_fusion_head=dict( + type='MaskFormerFusionHead', + num_things_classes=num_things_classes, + num_stuff_classes=num_stuff_classes, + loss_panoptic=None, + init_cfg=None), + train_cfg=dict( + num_points=12544, + oversample_ratio=3.0, + importance_sample_ratio=0.75, + assigner=dict( + type='MaskHungarianAssigner', + 
cls_cost=dict(type='ClassificationCost', weight=2.0), + mask_cost=dict( + type='CrossEntropyLossCost', weight=5.0, use_sigmoid=True), + dice_cost=dict( + type='DiceCost', weight=5.0, pred_act=True, eps=1.0)), + sampler=dict(type='MaskPseudoSampler')), + test_cfg=dict( + panoptic_on=True, + # For now, the dataset does not support + # evaluating semantic segmentation metric. + semantic_on=False, + instance_on=True, + # max_per_image is for instance segmentation. + max_per_image=100, + iou_thr=0.8, + # In Mask2Former's panoptic postprocessing, + # it will filter mask area where score is less than 0.5 . + filter_low_score=True), + init_cfg=None) + +# dataset settings +image_size = (1024, 1024) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='LoadPanopticAnnotations', + with_bbox=True, + with_mask=True, + with_seg=True), + dict(type='RandomFlip', flip_ratio=0.5), + # large scale jittering + dict( + type='Resize', + img_scale=image_size, + ratio_range=(0.1, 2.0), + multiscale_mode='range', + keep_ratio=True), + dict( + type='RandomCrop', + crop_size=image_size, + crop_type='absolute', + recompute_bbox=True, + allow_negative_crop=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=image_size), + dict(type='DefaultFormatBundle', img_to_float=True), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data_root = 'data/coco/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + 
train=dict(pipeline=train_pipeline), + val=dict( + pipeline=test_pipeline, + ins_ann_file=data_root + 'annotations/instances_val2017.json', + ), + test=dict( + pipeline=test_pipeline, + ins_ann_file=data_root + 'annotations/instances_val2017.json', + )) + +embed_multi = dict(lr_mult=1.0, decay_mult=0.0) +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.05, + eps=1e-8, + betas=(0.9, 0.999), + paramwise_cfg=dict( + custom_keys={ + 'backbone': dict(lr_mult=0.1, decay_mult=1.0), + 'query_embed': embed_multi, + 'query_feat': embed_multi, + 'level_embed': embed_multi, + }, + norm_decay_mult=0.0)) +optimizer_config = dict(grad_clip=dict(max_norm=0.01, norm_type=2)) + +# learning policy +lr_config = dict( + policy='step', + gamma=0.1, + by_epoch=False, + step=[327778, 355092], + warmup='linear', + warmup_by_epoch=False, + warmup_ratio=1.0, # no warmup + warmup_iters=10) + +max_iters = 368750 +runner = dict(type='IterBasedRunner', max_iters=max_iters) + +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook', by_epoch=False), + dict(type='TensorboardLoggerHook', by_epoch=False) + ]) +interval = 5000 +workflow = [('train', interval)] +checkpoint_config = dict( + by_epoch=False, interval=interval, save_last=True, max_keep_ckpts=3) + +# Before 365001th iteration, we do evaluation every 5000 iterations. +# After 365000th iteration, we do evaluation every 368750 iterations, +# which means that we do evaluation at the end of training. 
+dynamic_intervals = [(max_iters // interval * interval + 1, max_iters)] +evaluation = dict( + interval=interval, + dynamic_intervals=dynamic_intervals, + metric=['PQ', 'bbox', 'segm']) diff --git a/configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py b/configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py new file mode 100644 index 00000000..d0cf3762 --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py @@ -0,0 +1,5 @@ +_base_ = ['./mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py'] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window12_384_22k.pth' # noqa + +model = dict( + backbone=dict(init_cfg=dict(type='Pretrained', checkpoint=pretrained))) diff --git a/configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py b/configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py new file mode 100644 index 00000000..d2a58259 --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py @@ -0,0 +1,42 @@ +_base_ = ['./mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py'] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_base_patch4_window12_384.pth' # noqa + +depths = [2, 2, 18, 2] +model = dict( + backbone=dict( + pretrain_img_size=384, + embed_dims=128, + depths=depths, + num_heads=[4, 8, 16, 32], + window_size=12, + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + panoptic_head=dict(in_channels=[128, 256, 512, 1024])) + +# set all layers in backbone to lr_mult=0.1 +# set all norm layers, position_embeding, +# query_embeding, level_embeding to decay_multi=0.0 +backbone_norm_multi = dict(lr_mult=0.1, decay_mult=0.0) +backbone_embed_multi = dict(lr_mult=0.1, decay_mult=0.0) +embed_multi = dict(lr_mult=1.0, decay_mult=0.0) +custom_keys = { + 'backbone': dict(lr_mult=0.1, decay_mult=1.0), + 
'backbone.patch_embed.norm': backbone_norm_multi, + 'backbone.norm': backbone_norm_multi, + 'absolute_pos_embed': backbone_embed_multi, + 'relative_position_bias_table': backbone_embed_multi, + 'query_embed': embed_multi, + 'query_feat': embed_multi, + 'level_embed': embed_multi +} +custom_keys.update({ + f'backbone.stages.{stage_id}.blocks.{block_id}.norm': backbone_norm_multi + for stage_id, num_blocks in enumerate(depths) + for block_id in range(num_blocks) +}) +custom_keys.update({ + f'backbone.stages.{stage_id}.downsample.norm': backbone_norm_multi + for stage_id in range(len(depths) - 1) +}) +# optimizer +optimizer = dict( + paramwise_cfg=dict(custom_keys=custom_keys, norm_decay_mult=0.0)) diff --git a/configs/mmdet/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py b/configs/mmdet/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py new file mode 100644 index 00000000..13aa28c4 --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py @@ -0,0 +1,26 @@ +_base_ = ['./mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py'] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_large_patch4_window12_384_22k.pth' # noqa + +model = dict( + backbone=dict( + embed_dims=192, + num_heads=[6, 12, 24, 48], + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + panoptic_head=dict(num_queries=200, in_channels=[192, 384, 768, 1536])) + +data = dict(samples_per_gpu=1, workers_per_gpu=1) + +lr_config = dict(step=[655556, 710184]) + +max_iters = 737500 +runner = dict(type='IterBasedRunner', max_iters=max_iters) + +# Before 735001st iteration, we do evaluation every 5000 iterations. +# After 735000th iteration, we do evaluation every 737500 iterations, +# which means that we do evaluation at the end of training. 
+interval = 5000 +dynamic_intervals = [(max_iters // interval * interval + 1, max_iters)] +evaluation = dict( + interval=interval, + dynamic_intervals=dynamic_intervals, + metric=['PQ', 'bbox', 'segm']) diff --git a/configs/mmdet/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco.py b/configs/mmdet/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco.py new file mode 100644 index 00000000..7b1b05ab --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco.py @@ -0,0 +1,37 @@ +_base_ = ['./mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py'] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_small_patch4_window7_224.pth' # noqa + +depths = [2, 2, 18, 2] +model = dict( + backbone=dict( + depths=depths, init_cfg=dict(type='Pretrained', + checkpoint=pretrained))) + +# set all layers in backbone to lr_mult=0.1 +# set all norm layers, position_embedding, +# query_embedding, level_embedding to decay_mult=0.0 +backbone_norm_multi = dict(lr_mult=0.1, decay_mult=0.0) +backbone_embed_multi = dict(lr_mult=0.1, decay_mult=0.0) +embed_multi = dict(lr_mult=1.0, decay_mult=0.0) +custom_keys = { + 'backbone': dict(lr_mult=0.1, decay_mult=1.0), + 'backbone.patch_embed.norm': backbone_norm_multi, + 'backbone.norm': backbone_norm_multi, + 'absolute_pos_embed': backbone_embed_multi, + 'relative_position_bias_table': backbone_embed_multi, + 'query_embed': embed_multi, + 'query_feat': embed_multi, + 'level_embed': embed_multi +} +custom_keys.update({ + f'backbone.stages.{stage_id}.blocks.{block_id}.norm': backbone_norm_multi + for stage_id, num_blocks in enumerate(depths) + for block_id in range(num_blocks) +}) +custom_keys.update({ + f'backbone.stages.{stage_id}.downsample.norm': backbone_norm_multi + for stage_id in range(len(depths) - 1) +}) +# optimizer +optimizer = dict( + paramwise_cfg=dict(custom_keys=custom_keys, norm_decay_mult=0.0)) diff --git 
a/configs/mmdet/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py b/configs/mmdet/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py new file mode 100644 index 00000000..70e3103e --- /dev/null +++ b/configs/mmdet/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py @@ -0,0 +1,62 @@ +_base_ = ['./mask2former_r50_lsj_8x2_50e_coco.py'] +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa + +depths = [2, 2, 6, 2] +model = dict( + type='Mask2Former', + backbone=dict( + _delete_=True, + type='SwinTransformer', + embed_dims=96, + depths=depths, + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4, + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.3, + patch_norm=True, + out_indices=(0, 1, 2, 3), + with_cp=False, + convert_weights=True, + frozen_stages=-1, + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + panoptic_head=dict( + type='Mask2FormerHead', in_channels=[96, 192, 384, 768]), + init_cfg=None) + +# set all layers in backbone to lr_mult=0.1 +# set all norm layers, position_embeding, +# query_embeding, level_embeding to decay_multi=0.0 +backbone_norm_multi = dict(lr_mult=0.1, decay_mult=0.0) +backbone_embed_multi = dict(lr_mult=0.1, decay_mult=0.0) +embed_multi = dict(lr_mult=1.0, decay_mult=0.0) +custom_keys = { + 'backbone': dict(lr_mult=0.1, decay_mult=1.0), + 'backbone.patch_embed.norm': backbone_norm_multi, + 'backbone.norm': backbone_norm_multi, + 'absolute_pos_embed': backbone_embed_multi, + 'relative_position_bias_table': backbone_embed_multi, + 'query_embed': embed_multi, + 'query_feat': embed_multi, + 'level_embed': embed_multi +} +custom_keys.update({ + f'backbone.stages.{stage_id}.blocks.{block_id}.norm': backbone_norm_multi + for stage_id, num_blocks in enumerate(depths) + for block_id in range(num_blocks) +}) +custom_keys.update({ + f'backbone.stages.{stage_id}.downsample.norm': backbone_norm_multi 
+ for stage_id in range(len(depths) - 1) +}) +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.05, + eps=1e-8, + betas=(0.9, 0.999), + paramwise_cfg=dict(custom_keys=custom_keys, norm_decay_mult=0.0)) diff --git a/configs/mmdet/mask2former/metafile.yml b/configs/mmdet/mask2former/metafile.yml new file mode 100644 index 00000000..2ceed805 --- /dev/null +++ b/configs/mmdet/mask2former/metafile.yml @@ -0,0 +1,159 @@ +Collections: + - Name: Mask2Former + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Weight Decay + Training Resources: 8x A100 GPUs + Architecture: + - Mask2Former + Paper: + URL: https://arxiv.org/pdf/2112.01527 + Title: 'Masked-attention Mask Transformer for Universal Image Segmentation' + README: configs/mask2former/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.23.0/mmdet/models/detectors/mask2former.py#L7 + Version: v2.23.0 + +Models: +- Name: mask2former_r50_lsj_8x2_50e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_r50_lsj_8x2_50e_coco.py + Metadata: + Training Memory (GB): 13.9 + Iterations: 368750 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.9 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 51.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_r50_lsj_8x2_50e_coco/mask2former_r50_lsj_8x2_50e_coco_20220326_224516-0091ce2b.pth +- Name: mask2former_r101_lsj_8x2_50e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_r101_lsj_8x2_50e_coco.py + Metadata: + Training Memory (GB): 16.1 + Iterations: 368750 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.4 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 52.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_r101_lsj_8x2_50e_coco/mask2former_r101_lsj_8x2_50e_coco_20220329_225104-bb4df090.pth +- Name: mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco.py + Metadata: + Training Memory (GB): 15.9 + Iterations: 368750 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 43.4 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 53.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco/mask2former_swin-t-p4-w7-224_lsj_8x2_50e_coco_20220326_224553-c92f921c.pth +- Name: mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco.py + Metadata: + Training Memory (GB): 19.1 + Iterations: 368750 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 44.5 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 54.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco/mask2former_swin-s-p4-w7-224_lsj_8x2_50e_coco_20220329_225200-9f633bcf.pth +- Name: mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco.py + Metadata: + Training Memory (GB): 26.0 + Iterations: 368750 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 44.9 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 55.1 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco/mask2former_swin-b-p4-w12-384_lsj_8x2_50e_coco_20220331_002244-1db756b2.pth +- Name: mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco.py + Metadata: + Training Memory (GB): 25.8 + Iterations: 368750 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 46.3 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 56.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco/mask2former_swin-b-p4-w12-384-in21k_lsj_8x2_50e_coco_20220329_230021-89d7c1b1.pth +- Name: mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco + In Collection: Mask2Former + Config: configs/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco.py + Metadata: + Training Memory (GB): 21.1 + Iterations: 737500 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 52.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 48.5 + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 57.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask2former/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco/mask2former_swin-l-p4-w12-384-in21k_lsj_16x1_100e_coco_20220407_104949-c481ee28.pth diff --git a/configs/mmdet/mask_rcnn/README.md b/configs/mmdet/mask_rcnn/README.md new file mode 100644 index 00000000..9336dd35 --- /dev/null +++ b/configs/mmdet/mask_rcnn/README.md @@ -0,0 +1,59 @@ +# Mask R-CNN + +> [Mask R-CNN](https://arxiv.org/abs/1703.06870) + + + +## Abstract + +We present a conceptually simple, flexible, and general framework for object instance segmentation. 
Our approach efficiently detects objects in an image while simultaneously generating a high-quality segmentation mask for each instance. The method, called Mask R-CNN, extends Faster R-CNN by adding a branch for predicting an object mask in parallel with the existing branch for bounding box recognition. Mask R-CNN is simple to train and adds only a small overhead to Faster R-CNN, running at 5 fps. Moreover, Mask R-CNN is easy to generalize to other tasks, e.g., allowing us to estimate human poses in the same framework. We show top results in all three tracks of the COCO suite of challenges, including instance segmentation, bounding-box object detection, and person keypoint detection. Without bells and whistles, Mask R-CNN outperforms all existing, single-model entries on every task, including the COCO 2016 challenge winners. We hope our simple and effective approach will serve as a solid baseline and help ease future research in instance-level recognition. + +
+ +
+ +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.3 | | 38.0 | 34.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_20200504_231812.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN (FP16) | pytorch | 1x | 3.6 | 24.1 | 38.1 | 34.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205_130539.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 39.2 | 35.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_20200505_003907.log.json) | +| R-101-FPN | caffe | 1x | | | 40.4 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758.log.json)| +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 40.8 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_20200505_071027.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 11.3 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_20200506_004702.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | 8.0 | 42.8 | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201_124310.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 42.7 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208.log.json)| +| X-101-32x8d-FPN | pytorch | 1x | - | - | 42.8 | 38.3 | | + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale 
training. The users could finetune them for downstream tasks. + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | caffe | 2x | 4.3 | | 40.3 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_20200504_231822.log.json) +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 4.3 | | 40.8 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_20200504_163245.log.json) +| [R-50-FPN](./mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py) | pytorch| 3x | 4.1 | | 40.9 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154.log.json) +| [R-101-FPN](./mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 5.9 | | 42.9 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339-3c33ce02.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339.log.json) +| [R-101-FPN](./mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py) | pytorch| 3x | 6.1 | | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244-5675c317.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244.log.json) +| [X-101-32x4d-FPN](./mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py) | pytorch| 3x | 7.3 | | 43.6 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410-abcd7859.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410.log.json) +| 
[X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 1x | - | | 43.6 | 39.0 | +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 10.3 | | 44.3 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042-8bd2c639.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042.log.json) +| [X-101-64x4d-FPN](./mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 10.4 | | 44.5 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447-c376f129.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447.log.json) + +## Citation + +```latex +@article{He_2017, + title={Mask R-CNN}, + journal={2017 IEEE International Conference on Computer Vision (ICCV)}, + publisher={IEEE}, + author={He, Kaiming and Gkioxari, Georgia and Dollar, Piotr and Girshick, Ross}, + year={2017}, + month={Oct} +} +``` diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..95b324f5 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + 
init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..e39781dc --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,55 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + depth=101, + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py new file mode 
100644 index 00000000..b7986e85 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 00000000..c9059d53 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..0696cbe7 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,10 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 00000000..a44c0183 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + 
dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..5a23f8c7 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,40 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + 
transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 00000000..6308e404 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,49 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + 
train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py new file mode 100644 index 00000000..4f7150ca --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..1b48a210 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 00000000..bebbaaab --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + 
dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py new file mode 100644 index 00000000..3f8079d3 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py @@ -0,0 +1,61 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 
800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..6a6c9246 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 00000000..932b1f90 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py new file mode 100644 index 00000000..fb8289b0 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) 
diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..b3d9242c --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py new file mode 100644 index 00000000..9eb6d57e --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..a8b3799b --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + 
type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 00000000..2cd3cee5 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r101_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..b698a7d2 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py new file mode 100644 index 00000000..108ea4e3 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + 
type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 00000000..6b912f69 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + 
depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..8ba0e9c2 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,85 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, 
+ num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff 
--git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..2333b03a --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 00000000..6074cca2 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..9f9cb1c4 --- /dev/null +++ b/configs/mmdet/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff 
--git a/configs/mmdet/mask_rcnn/metafile.yml b/configs/mmdet/mask_rcnn/metafile.yml new file mode 100644 index 00000000..f74bdf30 --- /dev/null +++ b/configs/mmdet/mask_rcnn/metafile.yml @@ -0,0 +1,447 @@ +Collections: + - Name: Mask R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Softmax + - RPN + - Convolution + - Dense Connections + - FPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1703.06870v3 + Title: "Mask R-CNN" + README: configs/mask_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/mask_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: mask_rcnn_r50_caffe_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth + + - Name: mask_rcnn_r50_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth + + - Name: mask_rcnn_r50_fpn_fp16_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_fp16_1x_coco.py 
+ Metadata: + Training Memory (GB): 3.6 + Training Techniques: + - SGD with Momentum + - Weight Decay + - Mixed Precision Training + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth + + - Name: mask_rcnn_r50_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth + + - Name: mask_rcnn_r101_caffe_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth + + - Name: mask_rcnn_r101_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: 
(800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth + + - Name: mask_rcnn_r101_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth + + - Name: mask_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth + + - Name: mask_rcnn_x101_32x4d_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + 
box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth + + - Name: mask_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth + + - Name: mask_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth + + - Name: mask_rcnn_x101_32x8d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + 
Dataset: COCO + Metrics: + mask AP: 38.3 + + - Name: mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth + + - Name: mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth + + - Name: mask_rcnn_r50_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 4.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth + + - Name: mask_rcnn_r101_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 6.1 + Epochs: 36 + Results: + - Task: Object Detection + 
Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244-5675c317.pth + + - Name: mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339-3c33ce02.pth + + - Name: mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 7.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410-abcd7859.pth + + - Name: mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + + - Name: mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco + Metadata: + Training Memory (GB): 10.3 + Epochs: 36 + Results: + - Task: Object 
Detection + Dataset: COCO + Metrics: + box AP: 44.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042-8bd2c639.pth + + - Name: mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py + Metadata: + Epochs: 36 + Training Memory (GB): 10.4 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447-c376f129.pth diff --git a/configs/mmdet/maskformer/README.md b/configs/mmdet/maskformer/README.md new file mode 100644 index 00000000..fa394f90 --- /dev/null +++ b/configs/mmdet/maskformer/README.md @@ -0,0 +1,52 @@ +# MaskFormer + +> [Per-Pixel Classification is Not All You Need for Semantic Segmentation](https://arxiv.org/abs/2107.06278) + + + +## Abstract + +Modern approaches typically formulate semantic segmentation as a per-pixel classification task, while instance-level segmentation is handled with an alternative mask classification. Our key insight: mask classification is sufficiently general to solve both semantic- and instance-level segmentation tasks in a unified manner using the exact same model, loss, and training procedure. Following this observation, we propose MaskFormer, a simple mask classification model which predicts a set of binary masks, each associated with a single global class label prediction. Overall, the proposed mask classification-based method simplifies the landscape of effective approaches to semantic and panoptic segmentation tasks and shows excellent empirical results. 
In particular, we observe that MaskFormer outperforms per-pixel classification baselines when the number of classes is large. Our mask classification-based method outperforms both current state-of-the-art semantic (55.6 mIoU on ADE20K) and panoptic segmentation (52.7 PQ on COCO) models. + +
+ +
+ +## Introduction + +MaskFormer requires COCO and [COCO-panoptic](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip) dataset for training and evaluation. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ │ ├── panoptic_train2017.json +│ │ │ ├── panoptic_train2017 +│ │ │ ├── panoptic_val2017.json +│ │ │ ├── panoptic_val2017 +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +``` + +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | PQ | SQ | RQ | PQ_th | SQ_th | RQ_th | PQ_st | SQ_st | RQ_st | Config | Download | detail | +|:--------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:------:|:------:|:------:|:------:|:------:|:------:|:------:|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------:|:-------------------------------------------------------------------------------------------------------------------------------------------------------:| +| R-50 | pytorch | 75e | 16.2 | - | 46.854 | 80.617 | 57.085 | 51.089 | 81.511 | 61.853 | 40.463 | 79.269 | 49.888 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/maskformer/maskformer_r50_mstrain_16x1_75e_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/maskformer/maskformer_r50_mstrain_16x1_75e_coco/maskformer_r50_mstrain_16x1_75e_coco_20220221_141956-bc2699cb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/maskformer/maskformer_r50_mstrain_16x1_75e_coco/maskformer_r50_mstrain_16x1_75e_coco_20220221_141956.log.json) | This version was mentioned in Table XI, in paper [Masked-attention Mask Transformer for Universal Image Segmentation](https://arxiv.org/abs/2112.01527) | +| Swin-L | pytorch | 300e | 27.2 | - | 53.249 | 81.704 | 64.231 | 58.798 | 82.923 | 70.282 | 44.874 | 79.863 | 55.097 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco_20220326_221612-061b4eb8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco_20220326_221612.log.json) | - | +## Citation + +```latex +@inproceedings{cheng2021maskformer, + title={Per-Pixel Classification is Not All You Need for Semantic Segmentation}, + author={Bowen Cheng and Alexander G. 
Schwing and Alexander Kirillov}, + journal={NeurIPS}, + year={2021} +} +``` diff --git a/configs/mmdet/maskformer/maskformer_r50_mstrain_16x1_75e_coco.py b/configs/mmdet/maskformer/maskformer_r50_mstrain_16x1_75e_coco.py new file mode 100644 index 00000000..46b3c135 --- /dev/null +++ b/configs/mmdet/maskformer/maskformer_r50_mstrain_16x1_75e_coco.py @@ -0,0 +1,238 @@ +_base_ = [ + '../_base_/datasets/coco_panoptic.py', '../_base_/default_runtime.py' +] +num_things_classes = 80 +num_stuff_classes = 53 +num_classes = num_things_classes + num_stuff_classes +model = dict( + type='MaskFormer', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + panoptic_head=dict( + type='MaskFormerHead', + in_channels=[256, 512, 1024, 2048], # pass to pixel_decoder inside + feat_channels=256, + out_channels=256, + num_things_classes=num_things_classes, + num_stuff_classes=num_stuff_classes, + num_queries=100, + pixel_decoder=dict( + type='TransformerEncoderPixelDecoder', + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU'), + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + attn_drop=0.1, + proj_drop=0.1, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=256, + feedforward_channels=2048, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.1, + dropout_layer=None, + add_identity=True), + operation_order=('self_attn', 'norm', 'ffn', 'norm'), + norm_cfg=dict(type='LN'), + init_cfg=None, + batch_first=False), + init_cfg=None), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True)), + enforce_decoder_input_project=False, + 
positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + transformer_decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=6, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + attn_drop=0.1, + proj_drop=0.1, + dropout_layer=None, + batch_first=False), + ffn_cfgs=dict( + embed_dims=256, + feedforward_channels=2048, + num_fcs=2, + act_cfg=dict(type='ReLU', inplace=True), + ffn_drop=0.1, + dropout_layer=None, + add_identity=True), + # the following parameter was not used, + # just make current api happy + feedforward_channels=2048, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')), + init_cfg=None), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0, + reduction='mean', + class_weight=[1.0] * num_classes + [0.1]), + loss_mask=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + reduction='mean', + loss_weight=20.0), + loss_dice=dict( + type='DiceLoss', + use_sigmoid=True, + activate=True, + reduction='mean', + naive_dice=True, + eps=1.0, + loss_weight=1.0)), + panoptic_fusion_head=dict( + type='MaskFormerFusionHead', + num_things_classes=num_things_classes, + num_stuff_classes=num_stuff_classes, + loss_panoptic=None, + init_cfg=None), + train_cfg=dict( + assigner=dict( + type='MaskHungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=1.0), + mask_cost=dict( + type='FocalLossCost', weight=20.0, binary_input=True), + dice_cost=dict( + type='DiceCost', weight=1.0, pred_act=True, eps=1.0)), + sampler=dict(type='MaskPseudoSampler')), + test_cfg=dict( + panoptic_on=True, + # For now, the dataset does not support + # evaluating semantic segmentation metric. + semantic_on=False, + instance_on=False, + # max_per_image is for instance segmentation. 
+ max_per_image=100, + object_mask_thr=0.8, + iou_thr=0.8, + # In MaskFormer's panoptic postprocessing, + # it will not filter masks whose score is smaller than 0.5 . + filter_low_score=False), + init_cfg=None) + +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadPanopticAnnotations', + with_bbox=True, + with_mask=True, + with_seg=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=1, + train=dict(pipeline=train_pipeline), + 
val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + eps=1e-8, + betas=(0.9, 0.999), + paramwise_cfg=dict( + custom_keys={ + 'backbone': dict(lr_mult=0.1, decay_mult=1.0), + 'query_embed': dict(lr_mult=1.0, decay_mult=0.0) + }, + norm_decay_mult=0.0)) +optimizer_config = dict(grad_clip=dict(max_norm=0.01, norm_type=2)) + +# learning policy +lr_config = dict( + policy='step', + gamma=0.1, + by_epoch=True, + step=[50], + warmup='linear', + warmup_by_epoch=False, + warmup_ratio=1.0, # no warmup + warmup_iters=10) +runner = dict(type='EpochBasedRunner', max_epochs=75) diff --git a/configs/mmdet/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco.py b/configs/mmdet/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco.py new file mode 100644 index 00000000..bc23c54d --- /dev/null +++ b/configs/mmdet/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco.py @@ -0,0 +1,67 @@ +_base_ = './maskformer_r50_mstrain_16x1_75e_coco.py' + +pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_large_patch4_window12_384_22k.pth' # noqa +depths = [2, 2, 18, 2] +model = dict( + backbone=dict( + _delete_=True, + type='SwinTransformer', + pretrain_img_size=384, + embed_dims=192, + patch_size=4, + window_size=12, + mlp_ratio=4, + depths=depths, + num_heads=[6, 12, 24, 48], + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.3, + patch_norm=True, + out_indices=(0, 1, 2, 3), + with_cp=False, + convert_weights=True, + init_cfg=dict(type='Pretrained', checkpoint=pretrained)), + panoptic_head=dict( + in_channels=[192, 384, 768, 1536], # pass to pixel_decoder inside + pixel_decoder=dict( + _delete_=True, + type='PixelDecoder', + norm_cfg=dict(type='GN', num_groups=32), + act_cfg=dict(type='ReLU')), + enforce_decoder_input_project=True)) + +# weight_decay = 0.01 +# norm_weight_decay = 0.0 +# 
embed_weight_decay = 0.0 +embed_multi = dict(lr_mult=1.0, decay_mult=0.0) +norm_multi = dict(lr_mult=1.0, decay_mult=0.0) +custom_keys = { + 'norm': norm_multi, + 'absolute_pos_embed': embed_multi, + 'relative_position_bias_table': embed_multi, + 'query_embed': embed_multi +} + +# optimizer +optimizer = dict( + type='AdamW', + lr=6e-5, + weight_decay=0.01, + eps=1e-8, + betas=(0.9, 0.999), + paramwise_cfg=dict(custom_keys=custom_keys, norm_decay_mult=0.0)) +optimizer_config = dict(grad_clip=dict(max_norm=0.01, norm_type=2)) + +# learning policy +lr_config = dict( + policy='step', + gamma=0.1, + by_epoch=True, + step=[250], + warmup='linear', + warmup_by_epoch=False, + warmup_ratio=1e-6, + warmup_iters=1500) +runner = dict(type='EpochBasedRunner', max_epochs=300) diff --git a/configs/mmdet/maskformer/metafile.yml b/configs/mmdet/maskformer/metafile.yml new file mode 100644 index 00000000..6530fa14 --- /dev/null +++ b/configs/mmdet/maskformer/metafile.yml @@ -0,0 +1,43 @@ +Collections: + - Name: MaskFormer + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Weight Decay + Training Resources: 16x V100 GPUs + Architecture: + - MaskFormer + Paper: + URL: https://arxiv.org/pdf/2107.06278 + Title: 'Per-Pixel Classification is Not All You Need for Semantic Segmentation' + README: configs/maskformer/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.22.0/mmdet/models/detectors/maskformer.py#L7 + Version: v2.22.0 + +Models: + - Name: maskformer_r50_mstrain_16x1_75e_coco + In Collection: MaskFormer + Config: configs/maskformer/maskformer_r50_mstrain_16x1_75e_coco.py + Metadata: + Training Memory (GB): 16.2 + Epochs: 75 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 46.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/maskformer/maskformer_r50_mstrain_16x1_75e_coco/maskformer_r50_mstrain_16x1_75e_coco_20220221_141956-bc2699cb.pth + - Name: maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco + In 
Collection: MaskFormer + Config: configs/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco.py + Metadata: + Training Memory (GB): 27.2 + Epochs: 300 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 53.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/maskformer/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco/maskformer_swin-l-p4-w12_mstrain_64x1_300e_coco_20220326_221612-061b4eb8.pth diff --git a/configs/mmdet/ms_rcnn/README.md b/configs/mmdet/ms_rcnn/README.md new file mode 100644 index 00000000..44508c06 --- /dev/null +++ b/configs/mmdet/ms_rcnn/README.md @@ -0,0 +1,36 @@ +# MS R-CNN + +> [Mask Scoring R-CNN](https://arxiv.org/abs/1903.00241) + + + +## Abstract + +Letting a deep network be aware of the quality of its own predictions is an interesting yet important problem. In the task of instance segmentation, the confidence of instance classification is used as mask quality score in most instance segmentation frameworks. However, the mask quality, quantified as the IoU between the instance mask and its ground truth, is usually not well correlated with classification score. In this paper, we study this problem and propose Mask Scoring R-CNN which contains a network block to learn the quality of the predicted instance masks. The proposed network block takes the instance feature and the corresponding predicted mask together to regress the mask IoU. The mask scoring strategy calibrates the misalignment between mask quality and mask score, and improves instance segmentation performance by prioritizing more accurate mask predictions during COCO AP evaluation. By extensive evaluations on the COCO dataset, Mask Scoring R-CNN brings consistent and noticeable gain with different models, and outperforms the state-of-the-art Mask R-CNN. We hope our simple and effective approach will provide a new direction for improving instance segmentation. + +
+ +
+ +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.5 | | 38.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848.log.json) | +| R-50-FPN | caffe | 2x | - | - | 38.8 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_20200506_004738.log.json) | +| R-101-FPN | caffe | 1x | 6.5 | | 40.4 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_20200506_004755.log.json) | +| R-101-FPN | caffe | 2x | - | - | 41.1 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_20200506_011134.log.json) | +| R-X101-32x4d | pytorch | 2x | 7.9 | 11.0 | 41.8 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206_100113.log.json) | +| R-X101-64x4d | pytorch | 1x | 11.0 | 8.0 | 43.0 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206_091744.log.json) | +| R-X101-64x4d | pytorch | 2x | 11.0 | 8.0 | 42.6 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308_012247.log.json) | + +## Citation + +```latex +@inproceedings{huang2019msrcnn, + title={Mask Scoring R-CNN}, + author={Zhaojin Huang and Lichao Huang and Yongchao Gong and Chang Huang and Xinggang Wang}, + booktitle={IEEE Conference on 
Computer Vision and Pattern Recognition}, + year={2019}, +} +``` diff --git a/configs/mmdet/ms_rcnn/metafile.yml b/configs/mmdet/ms_rcnn/metafile.yml new file mode 100644 index 00000000..a6c7dc59 --- /dev/null +++ b/configs/mmdet/ms_rcnn/metafile.yml @@ -0,0 +1,159 @@ +Collections: + - Name: Mask Scoring R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - FPN + - ResNet + - RoIAlign + Paper: + URL: https://arxiv.org/abs/1903.00241 + Title: 'Mask Scoring R-CNN' + README: configs/ms_rcnn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/mask_scoring_rcnn.py#L6 + Version: v2.0.0 + +Models: + - Name: ms_rcnn_r50_caffe_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth + + - Name: ms_rcnn_r50_caffe_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth + + - Name: ms_rcnn_r101_caffe_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 12 + Results: + - Task: Object Detection + 
Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth + + - Name: ms_rcnn_r101_caffe_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth + + - Name: ms_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth + + - Name: ms_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 11.0 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth + + - Name: ms_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 11.0 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..9b7dcbbf --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py new file mode 100644 index 00000000..202bcced --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r101_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 00000000..5845125a --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + 
type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80)), + # model training and testing settings + train_cfg=dict(rcnn=dict(mask_thr_binary=0.5))) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py new file mode 100644 index 00000000..008a70ae --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..0a163ce4 --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80)), + # model training and testing settings + train_cfg=dict(rcnn=dict(mask_thr_binary=0.5))) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..20479bbd --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', 
checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 00000000..ee5b7341 --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py b/configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 00000000..54c605b9 --- /dev/null +++ b/configs/mmdet/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_x101_64x4d_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/nas_fcos/README.md b/configs/mmdet/nas_fcos/README.md new file mode 100644 index 00000000..74453c6b --- /dev/null +++ b/configs/mmdet/nas_fcos/README.md @@ -0,0 +1,35 @@ +# NAS-FCOS + +> [NAS-FCOS: Fast Neural Architecture Search for Object Detection](https://arxiv.org/abs/1906.04423) + + + +## Abstract + +The success of deep neural networks relies on significant architecture engineering. Recently neural architecture search (NAS) has emerged as a promise to greatly reduce manual effort in network design by automatically searching for optimal architectures, although typically such algorithms need an excessive amount of computational resources, e.g., a few thousand GPU-days. To date, on challenging vision tasks such as object detection, NAS, especially fast versions of NAS, is less studied. 
Here we propose to search for the decoder structure of object detectors with search efficiency being taken into consideration. To be more specific, we aim to efficiently search for the feature pyramid network (FPN) as well as the prediction head of a simple anchor-free object detector, namely FCOS, using a tailored reinforcement learning paradigm. With carefully designed search space, search algorithms and strategies for evaluating network quality, we are able to efficiently search a top-performing detection architecture within 4 days using 8 V100 GPUs. The discovered architecture surpasses state-of-the-art object detection models (such as Faster R-CNN, RetinaNet and FCOS) by 1.5 to 3.5 points in AP on the COCO dataset, with comparable computation complexity and memory footprint, demonstrating the efficacy of the proposed NAS for object detection. + +
+ +
+ +## Results and Models + +| Head | Backbone | Style | GN-head | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:---------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| NAS-FCOSHead | R-50 | caffe | Y | 1x | | | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520.log.json) | +| FCOSHead | R-50 | caffe | Y | 1x | | | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521.log.json) | + +**Notes:** + +- To be consistent with the author's implementation, we use 4 GPUs with 4 images/GPU. 
+ +## Citation + +```latex +@article{wang2019fcos, + title={Nas-fcos: Fast neural architecture search for object detection}, + author={Wang, Ning and Gao, Yang and Chen, Hao and Wang, Peng and Tian, Zhi and Shen, Chunhua}, + journal={arXiv preprint arXiv:1906.04423}, + year={2019} +} +``` diff --git a/configs/mmdet/nas_fcos/metafile.yml b/configs/mmdet/nas_fcos/metafile.yml new file mode 100644 index 00000000..1ea28cfc --- /dev/null +++ b/configs/mmdet/nas_fcos/metafile.yml @@ -0,0 +1,44 @@ +Collections: + - Name: NAS-FCOS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x V100 GPUs + Architecture: + - FPN + - NAS-FCOS + - ResNet + Paper: + URL: https://arxiv.org/abs/1906.04423 + Title: 'NAS-FCOS: Fast Neural Architecture Search for Object Detection' + README: configs/nas_fcos/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/detectors/nasfcos.py#L6 + Version: v2.1.0 + +Models: + - Name: nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco + In Collection: NAS-FCOS + Config: configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth + + - Name: nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco + In Collection: NAS-FCOS + Config: configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth diff --git 
a/configs/mmdet/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/configs/mmdet/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 00000000..a455c928 --- /dev/null +++ b/configs/mmdet/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,100 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), 
keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/configs/mmdet/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/configs/mmdet/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 00000000..b7794925 --- /dev/null +++ b/configs/mmdet/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,99 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='NASFCOSHead', + num_classes=80, + in_channels=256, + feat_channels=256, + strides=[8, 16, 32, 
64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/configs/mmdet/nas_fpn/README.md b/configs/mmdet/nas_fpn/README.md new file mode 100644 index 00000000..7b39eec5 --- /dev/null +++ b/configs/mmdet/nas_fpn/README.md @@ -0,0 +1,36 @@ +# NAS-FPN + +> [NAS-FPN: Learning Scalable Feature 
Pyramid Architecture for Object Detection](https://arxiv.org/abs/1904.07392) + + + +## Abstract + +Current state-of-the-art convolutional architectures for object detection are manually designed. Here we aim to learn a better architecture of feature pyramid network for object detection. We adopt Neural Architecture Search and discover a new feature pyramid architecture in a novel scalable search space covering all cross-scale connections. The discovered architecture, named NAS-FPN, consists of a combination of top-down and bottom-up connections to fuse features across scales. NAS-FPN, combined with various backbone models in the RetinaNet framework, achieves better accuracy and latency tradeoff compared to state-of-the-art object detection models. NAS-FPN improves mobile detection accuracy by 2 AP compared to state-of-the-art SSDLite with MobileNetV2 model in [32] and achieves 48.3 AP which surpasses Mask R-CNN [10] detection accuracy with less computation time. + +
+ +
+ +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. RetinaNet is used in the paper. + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50-FPN | 50e | 12.9 | 22.9 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco_20200529_095329.log.json) | +| R-50-NASFPN | 50e | 13.2 | 23.0 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco_20200528_230008.log.json) | + +**Note**: We find that it is unstable to train NAS-FPN and there is a small chance that results can be 3% mAP lower. 
+ +## Citation + +```latex +@inproceedings{ghiasi2019fpn, + title={Nas-fpn: Learning scalable feature pyramid architecture for object detection}, + author={Ghiasi, Golnaz and Lin, Tsung-Yi and Le, Quoc V}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={7036--7045}, + year={2019} +} +``` diff --git a/configs/mmdet/nas_fpn/metafile.yml b/configs/mmdet/nas_fpn/metafile.yml new file mode 100644 index 00000000..ab8d6497 --- /dev/null +++ b/configs/mmdet/nas_fpn/metafile.yml @@ -0,0 +1,59 @@ +Collections: + - Name: NAS-FPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - NAS-FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.07392 + Title: 'NAS-FPN: Learning Scalable Feature Pyramid Architecture for Object Detection' + README: configs/nas_fpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/necks/nas_fpn.py#L67 + Version: v2.0.0 + +Models: + - Name: retinanet_r50_fpn_crop640_50e_coco + In Collection: NAS-FPN + Config: configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py + Metadata: + Training Memory (GB): 12.9 + inference time (ms/im): + - value: 43.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth + + - Name: retinanet_r50_nasfpn_crop640_50e_coco + In Collection: NAS-FPN + Config: configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py + Metadata: + Training Memory (GB): 13.2 + inference time (ms/im): + - value: 43.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: 
+ box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth diff --git a/configs/mmdet/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py b/configs/mmdet/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py new file mode 100644 index 00000000..e4408fe8 --- /dev/null +++ b/configs/mmdet/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,85 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + relu_before_extra_convs=True, + no_norm_on_lateral=True, + norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), 
+ dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=50) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py b/configs/mmdet/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py new file mode 100644 index 00000000..1387a10f --- /dev/null +++ b/configs/mmdet/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py @@ -0,0 +1,84 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +# model settings +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict(type='NASFPN', stack_times=7, norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 
57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=128), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=50) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/openimages/README.md b/configs/mmdet/openimages/README.md new file mode 100644 index 00000000..a2f2c136 --- /dev/null +++ b/configs/mmdet/openimages/README.md @@ -0,0 +1,143 @@ +# Open Images Dataset + +> [Open Images Dataset](https://arxiv.org/abs/1811.00982) + + +## Abstract + + +#### Open Images v6 + +[Open Images](https://storage.googleapis.com/openimages/web/index.html) is a dataset of ~9M images annotated with image-level labels, +object bounding boxes, object segmentation masks, visual relationships, +and localized narratives: + +- It contains a total of 16M bounding boxes for 600 object classes on +1.9M images, making it the largest existing dataset with object location +annotations. The boxes have been largely manually drawn by professional +annotators to ensure accuracy and consistency. The images are very diverse +and often contain complex scenes with several objects (8.3 per image on +average). + +- Open Images also offers visual relationship annotations, indicating pairs +of objects in particular relations (e.g. "woman playing guitar", "beer on +table"), object properties (e.g. "table is wooden"), and human actions (e.g. +"woman is jumping"). In total it has 3.3M annotations from 1,466 distinct +relationship triplets. + +- In V5 we added segmentation masks for 2.8M object instances in 350 classes. +Segmentation masks mark the outline of objects, which characterizes their +spatial extent to a much higher level of detail. + +- In V6 we added 675k localized narratives: multimodal descriptions of images +consisting of synchronized voice, text, and mouse traces over the objects being +described. (Note we originally launched localized narratives only on train in V6, +but since July 2020 we also have validation and test covered.) + +- Finally, the dataset is annotated with 59.9M image-level labels spanning 19,957 +classes. 
+ +We believe that having a single dataset with unified annotations for image +classification, object detection, visual relationship detection, instance +segmentation, and multimodal image descriptions will enable to study these +tasks jointly and stimulate progress towards genuine scene understanding. + + +
+ +
+ +#### Open Images Challenge 2019 + +[Open Images Challenges 2019](https://storage.googleapis.com/openimages/web/challenge2019.html) is based on the V5 release of the Open +Images dataset. The images of the dataset are very varied and +often contain complex scenes with several objects (explore the dataset). + +## Citation + +``` +@article{OpenImages, + author = {Alina Kuznetsova and Hassan Rom and Neil Alldrin and Jasper Uijlings and Ivan Krasin and Jordi Pont-Tuset and Shahab Kamali and Stefan Popov and Matteo Malloci and Alexander Kolesnikov and Tom Duerig and Vittorio Ferrari}, + title = {The Open Images Dataset V4: Unified image classification, object detection, and visual relationship detection at scale}, + year = {2020}, + journal = {IJCV} +} +``` + +## Prepare Dataset + +1. You need to download and extract Open Images dataset. + +2. The Open Images dataset does not have image metas (width and height of the image), +which will be used during evaluation. We suggest to get test image metas before +training/testing by using `tools/misc/get_image_metas.py`. + + **Usage** + ```shell + python tools/misc/get_image_metas.py ${CONFIG} \ + --out ${OUTPUT FILE NAME} + ``` + +3. 
The directory should be like this: + + ```none + mmdetection + ├── mmdet + ├── tools + ├── configs + ├── data + │   ├── OpenImages + │   │   ├── annotations + │   │   │   ├── bbox_labels_600_hierarchy.json + │   │   │   ├── class-descriptions-boxable.csv + │   │   │   ├── oidv6-train-annotations-bbox.csv + │   │   │   ├── validation-annotations-bbox.csv + │   │   │   ├── validation-annotations-human-imagelabels-boxable.csv + │   │   │   ├── validation-image-metas.pkl      # get from script + │   │   ├── challenge2019 + │   │   │   ├── challenge-2019-train-detection-bbox.txt + │   │   │   ├── challenge-2019-validation-detection-bbox.txt + │   │   │   ├── class_label_tree.np + │   │   │   ├── class_sample_train.pkl + │   │   │   ├── challenge-2019-validation-detection-human-imagelabels.csv       # download from official website + │   │   │   ├── challenge-2019-validation-metas.pkl     # get from script + │   │   ├── OpenImages + │   │   │   ├── train       # training images + │   │   │   ├── test        # testing images + │   │   │   ├── validation  # validation images + ``` + +**Note**: +1. The training and validation images of Open Images Challenge dataset are based on +Open Images v6, but the test images are different. +2. The Open Images Challenges annotations are obtained from [TSD](https://github.com/Sense-X/TSD). +You can also download the annotations from [official website](https://storage.googleapis.com/openimages/web/challenge2019_downloads.html), +and set data.train.type=OpenImagesDataset, data.val.type=OpenImagesDataset, and data.test.type=OpenImagesDataset in the config. +3. If users do not want to use `validation-annotations-human-imagelabels-boxable.csv` and `challenge-2019-validation-detection-human-imagelabels.csv`, +users can set `data.val.load_image_level_labels=False` and `data.test.load_image_level_labels=False` in the config. +Please note that loading image-level labels is the default of the Open Images evaluation metric. 
+More details please refer to the [official website](https://storage.googleapis.com/openimages/web/evaluation.html) + +## Results and Models + +| Architecture | Backbone | Style | Lr schd | Sampler | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50 | pytorch | 1x | Group Sampler | 7.7 | - | 51.6 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_20211130_231159-e87ab7ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_20211130_231159.log.json) | +| Faster R-CNN | R-50 | pytorch | 1x | Class Aware Sampler | 7.7 | - | 60.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_20220306_202424-98c630e5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_20220306_202424.log.json) | +| Faster R-CNN (Challenge 2019) | R-50 | pytorch | 1x | Group Sampler | 7.7 | - | 54.9 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge_20220114_045100-0e79e5df.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge_20220114_045100.log.json) | +| Faster R-CNN (Challenge 2019) | R-50 | pytorch | 1x | Class Aware Sampler | 7.1 | - | 65.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge_20220221_192021-34c402d9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge_20220221_192021.log.json) | +| RetinaNet | R-50 | pytorch | 1x | Group Sampler | 6.6 | - | 61.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/openimages/retinanet_r50_fpn_32x2_1x_openimages.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/openimages/retinanet_r50_fpn_32x2_1x_openimages/retinanet_r50_fpn_32x2_1x_openimages_20211223_071954-d2ae5462.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/openimages/retinanet_r50_fpn_32x2_1x_openimages/retinanet_r50_fpn_32x2_1x_openimages_20211223_071954.log.json) | +| SSD | VGG16 | pytorch | 36e | Group Sampler | 10.8 | - | 35.4 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/openimages/ssd300_32x8_36e_openimages.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/openimages/ssd300_32x8_36e_openimages/ssd300_32x8_36e_openimages_20211224_000232-dce93846.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/openimages/ssd300_32x8_36e_openimages/ssd300_32x8_36e_openimages_20211224_000232.log.json) | + +**Notes:** + +- 'cas' is short for 'Class Aware Sampler' + +### Results of considering image-level labels + +| Architecture | Sampler | Consider Image
Level Labels | box AP| +|:------------:|:-------:|:---------------------------:|:-----:| +|Faster R-CNN r50 (Challenge 2019)| Group Sampler| w/o | 62.19 | +|Faster R-CNN r50 (Challenge 2019)| Group Sampler| w/ | 54.87 | +|Faster R-CNN r50 (Challenge 2019)| Class Aware Sampler| w/o | 71.77 | +|Faster R-CNN r50 (Challenge 2019)| Class Aware Sampler| w/ | 64.98 | diff --git a/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages.py b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages.py new file mode 100644 index 00000000..3dfc341b --- /dev/null +++ b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages.py @@ -0,0 +1,23 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/openimages_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict(roi_head=dict(bbox_head=dict(num_classes=601))) + +# Using 32 GPUS while training +optimizer = dict(type='SGD', lr=0.08, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=26000, + warmup_ratio=1.0 / 64, + step=[8, 11]) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. 
+# base_batch_size = (32 GPUs) x (2 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py new file mode 100644 index 00000000..c8900adc --- /dev/null +++ b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py @@ -0,0 +1,47 @@ +_base_ = ['faster_rcnn_r50_fpn_32x2_1x_openimages.py'] + +model = dict( + roi_head=dict(bbox_head=dict(num_classes=500)), + test_cfg=dict(rcnn=dict(score_thr=0.01))) + +# dataset settings +dataset_type = 'OpenImagesChallengeDataset' +data_root = 'data/OpenImages/' +data = dict( + train=dict( + type=dataset_type, + ann_file=data_root + + 'challenge2019/challenge-2019-train-detection-bbox.txt', + img_prefix=data_root + 'OpenImages/', + label_file=data_root + 'challenge2019/cls-label-description.csv', + hierarchy_file=data_root + 'challenge2019/class_label_tree.np'), + val=dict( + type=dataset_type, + ann_file=data_root + + 'challenge2019/challenge-2019-validation-detection-bbox.txt', + img_prefix=data_root + 'OpenImages/', + label_file=data_root + 'challenge2019/cls-label-description.csv', + hierarchy_file=data_root + 'challenge2019/class_label_tree.np', + meta_file=data_root + + 'challenge2019/challenge-2019-validation-metas.pkl', + image_level_ann_file=data_root + + 'challenge2019/challenge-2019-validation-detection-' + 'human-imagelabels.csv'), + test=dict( + type=dataset_type, + ann_file=data_root + + 'challenge2019/challenge-2019-validation-detection-bbox.txt', + img_prefix=data_root + 'OpenImages/', + label_file=data_root + 'challenge2019/cls-label-description.csv', + hierarchy_file=data_root + 'challenge2019/class_label_tree.np', + meta_file=data_root + + 'challenge2019/challenge-2019-validation-metas.pkl', + image_level_ann_file=data_root + + 'challenge2019/challenge-2019-validation-detection-' + 'human-imagelabels.csv')) +evaluation = 
dict(interval=1, metric='mAP') + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (32 GPUs) x (2 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages.py b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages.py new file mode 100644 index 00000000..88d029d6 --- /dev/null +++ b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages.py @@ -0,0 +1,5 @@ +_base_ = ['faster_rcnn_r50_fpn_32x2_1x_openimages.py'] + +# Use ClassAwareSampler +data = dict( + train_dataloader=dict(class_aware_sampler=dict(num_sample_class=1))) diff --git a/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge.py b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge.py new file mode 100644 index 00000000..26bd64e6 --- /dev/null +++ b/configs/mmdet/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge.py @@ -0,0 +1,5 @@ +_base_ = ['faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py'] + +# Use ClassAwareSampler +data = dict( + train_dataloader=dict(class_aware_sampler=dict(num_sample_class=1))) diff --git a/configs/mmdet/openimages/metafile.yml b/configs/mmdet/openimages/metafile.yml new file mode 100644 index 00000000..9be17261 --- /dev/null +++ b/configs/mmdet/openimages/metafile.yml @@ -0,0 +1,102 @@ +Models: + - Name: faster_rcnn_r50_fpn_32x2_1x_openimages + In Collection: Faster R-CNN + Config: configs/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages.py + Metadata: + Training Memory (GB): 7.7 + Epochs: 12 + Training Data: Open Images v6 + Training Techniques: + - SGD with Momentum + - Weight Decay + Results: + - Task: Object Detection + Dataset: Open Images v6 + Metrics: + box AP: 51.6 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_20211130_231159-e87ab7ce.pth + + - Name: retinanet_r50_fpn_32x2_1x_openimages + In Collection: RetinaNet + Config: configs/openimages/retinanet_r50_fpn_32x2_1x_openimages.py + Metadata: + Training Memory (GB): 6.6 + Epochs: 12 + Training Data: Open Images v6 + Training Techniques: + - SGD with Momentum + - Weight Decay + Results: + - Task: Object Detection + Dataset: Open Images v6 + Metrics: + box AP: 61.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/openimages/retinanet_r50_fpn_32x2_1x_openimages/retinanet_r50_fpn_32x2_1x_openimages_20211223_071954-d2ae5462.pth + + - Name: ssd300_32x8_36e_openimages + In Collection: SSD + Config: configs/openimages/ssd300_32x8_36e_openimages.py + Metadata: + Training Memory (GB): 10.8 + Epochs: 36 + Training Data: Open Images v6 + Training Techniques: + - SGD with Momentum + - Weight Decay + Results: + - Task: Object Detection + Dataset: Open Images v6 + Metrics: + box AP: 35.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/openimages/ssd300_32x8_36e_openimages/ssd300_32x8_36e_openimages_20211224_000232-dce93846.pth + + - Name: faster_rcnn_r50_fpn_32x2_1x_openimages_challenge + In Collection: Faster R-CNN + Config: configs/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge.py + Metadata: + Training Memory (GB): 7.7 + Epochs: 12 + Training Data: Open Images Challenge 2019 + Training Techniques: + - SGD with Momentum + - Weight Decay + Results: + - Task: Object Detection + Dataset: Open Images Challenge 2019 + Metrics: + box AP: 54.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge/faster_rcnn_r50_fpn_32x2_1x_openimages_challenge_20220114_045100-0e79e5df.pth + + - Name: faster_rcnn_r50_fpn_32x2_cas_1x_openimages + In Collection: Faster R-CNN + Config:
configs/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages.py + Metadata: + Training Memory (GB): 7.7 + Epochs: 12 + Training Data: Open Images v6 + Training Techniques: + - SGD with Momentum + - Weight Decay + Results: + - Task: Object Detection + Dataset: Open Images v6 + Metrics: + box AP: 60.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_20220306_202424-98c630e5.pth + + - Name: faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge + In Collection: Faster R-CNN + Config: configs/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge.py + Metadata: + Training Memory (GB): 7.1 + Epochs: 12 + Training Data: Open Images Challenge 2019 + Training Techniques: + - SGD with Momentum + - Weight Decay + Results: + - Task: Object Detection + Dataset: Open Images Challenge 2019 + Metrics: + box AP: 65.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/openimages/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge/faster_rcnn_r50_fpn_32x2_cas_1x_openimages_challenge_20220221_192021-34c402d9.pth diff --git a/configs/mmdet/openimages/retinanet_r50_fpn_32x2_1x_openimages.py b/configs/mmdet/openimages/retinanet_r50_fpn_32x2_1x_openimages.py new file mode 100644 index 00000000..0191aa16 --- /dev/null +++ b/configs/mmdet/openimages/retinanet_r50_fpn_32x2_1x_openimages.py @@ -0,0 +1,22 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/openimages_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict(bbox_head=dict(num_classes=601)) + +optimizer = dict(type='SGD', lr=0.08, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=26000, + warmup_ratio=1.0 / 64, + step=[8, 11]) + +# NOTE: `auto_scale_lr` is for
automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (32 GPUs) x (2 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/openimages/ssd300_32x8_36e_openimages.py b/configs/mmdet/openimages/ssd300_32x8_36e_openimages.py new file mode 100644 index 00000000..e2565b98 --- /dev/null +++ b/configs/mmdet/openimages/ssd300_32x8_36e_openimages.py @@ -0,0 +1,83 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/openimages_detection.py', + '../_base_/default_runtime.py', '../_base_/schedules/schedule_1x.py' +] +model = dict( + bbox_head=dict( + num_classes=601, + anchor_generator=dict(basesize_ratio_range=(0.2, 0.9)))) +# dataset settings +dataset_type = 'OpenImagesDataset' +data_root = 'data/OpenImages/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True, normed_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, # using 32 GPUS while training. 
+ workers_per_gpu=0, # workers_per_gpu > 0 may occur out of memory + train=dict( + _delete_=True, + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/oidv6-train-annotations-bbox.csv', + img_prefix=data_root + 'OpenImages/train/', + label_file=data_root + + 'annotations/class-descriptions-boxable.csv', + hierarchy_file=data_root + + 'annotations/bbox_labels_600_hierarchy.json', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.04, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=20000, + warmup_ratio=0.001, + step=[8, 11]) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (32 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=256) diff --git a/configs/mmdet/paa/README.md b/configs/mmdet/paa/README.md new file mode 100644 index 00000000..0f299004 --- /dev/null +++ b/configs/mmdet/paa/README.md @@ -0,0 +1,47 @@ +# PAA + +> [Probabilistic Anchor Assignment with IoU Prediction for Object Detection](https://arxiv.org/abs/2007.08103) + + + +## Abstract + +In object detection, determining which anchors to assign as positive or negative samples, known as anchor assignment, has been revealed as a core procedure that can significantly affect a model's performance. In this paper we propose a novel anchor assignment strategy that adaptively separates anchors into positive and negative samples for a ground truth bounding box according to the model's learning status such that it is able to reason about the separation in a probabilistic manner. To do so we first calculate the scores of anchors conditioned on the model and fit a probability distribution to these scores. 
The model is then trained with anchors separated into positive and negative samples according to their probabilities. Moreover, we investigate the gap between the training and testing objectives and propose to predict the Intersection-over-Unions of detected boxes as a measure of localization quality to reduce the discrepancy. The combined score of classification and localization qualities serving as a box selection metric in non-maximum suppression well aligns with the proposed anchor assignment strategy and leads significant performance improvements. The proposed methods only add a single convolutional layer to RetinaNet baseline and does not require multiple anchors per location, so are efficient. Experimental results verify the effectiveness of the proposed methods. Especially, our models set new records for single-stage detectors on MS COCO test-dev dataset with various backbones. + +
+ +
+ +## Results and Models + +We provide config files to reproduce the object detection results in the +ECCV 2020 paper for Probabilistic Anchor Assignment with IoU +Prediction for Object Detection. + +| Backbone | Lr schd | Mem (GB) | Score voting | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------------:|:------:|:------:|:--------:| +| R-50-FPN | 12e | 3.7 | True | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.log.json) | +| R-50-FPN | 12e | 3.7 | False | 40.2 | - | +| R-50-FPN | 18e | 3.7 | True | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1.5x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.log.json) | +| R-50-FPN | 18e | 3.7 | False | 41.2 | - | +| R-50-FPN | 24e | 3.7 | True | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.log.json) | +| R-50-FPN | 36e | 3.7 | True | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722-06a6880b.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722.log.json) | +| R-101-FPN | 12e | 6.2 | True | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.log.json) | +| R-101-FPN | 12e | 6.2 | False | 42.4 | - | +| R-101-FPN | 24e | 6.2 | True | 43.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.log.json) | +| R-101-FPN | 36e | 6.2 | True | 45.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202-83250d22.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202.log.json) | + +**Note**: + +1. We find that the performance is unstable with 1x setting and may fluctuate by about 0.2 mAP. We report the best results. 
+ +## Citation + +```latex +@inproceedings{paa-eccv2020, + title={Probabilistic Anchor Assignment with IoU Prediction for Object Detection}, + author={Kim, Kang and Lee, Hee Seok}, + booktitle = {ECCV}, + year={2020} +} +``` diff --git a/configs/mmdet/paa/metafile.yml b/configs/mmdet/paa/metafile.yml new file mode 100644 index 00000000..e08b663a --- /dev/null +++ b/configs/mmdet/paa/metafile.yml @@ -0,0 +1,104 @@ +Collections: + - Name: PAA + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Probabilistic Anchor Assignment + - ResNet + Paper: + URL: https://arxiv.org/abs/2007.08103 + Title: 'Probabilistic Anchor Assignment with IoU Prediction for Object Detection' + README: configs/paa/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.4.0/mmdet/models/detectors/paa.py#L6 + Version: v2.4.0 + +Models: + - Name: paa_r50_fpn_1x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth + + - Name: paa_r50_fpn_1.5x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_1.5x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth + + - Name: paa_r50_fpn_2x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth + + - Name: 
paa_r50_fpn_mstrain_3x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722-06a6880b.pth + + - Name: paa_r101_fpn_1x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth + + - Name: paa_r101_fpn_2x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth + + - Name: paa_r101_fpn_mstrain_3x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202-83250d22.pth diff --git a/configs/mmdet/paa/paa_r101_fpn_1x_coco.py b/configs/mmdet/paa/paa_r101_fpn_1x_coco.py new file mode 100644 index 00000000..94f1c278 --- /dev/null +++ b/configs/mmdet/paa/paa_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/paa/paa_r101_fpn_2x_coco.py b/configs/mmdet/paa/paa_r101_fpn_2x_coco.py new file mode 100644 index 00000000..641ef764 --- /dev/null +++ 
b/configs/mmdet/paa/paa_r101_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r101_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/paa/paa_r101_fpn_mstrain_3x_coco.py b/configs/mmdet/paa/paa_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..71858ed6 --- /dev/null +++ b/configs/mmdet/paa/paa_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './paa_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/paa/paa_r50_fpn_1.5x_coco.py b/configs/mmdet/paa/paa_r50_fpn_1.5x_coco.py new file mode 100644 index 00000000..aabce4af --- /dev/null +++ b/configs/mmdet/paa/paa_r50_fpn_1.5x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +lr_config = dict(step=[12, 16]) +runner = dict(type='EpochBasedRunner', max_epochs=18) diff --git a/configs/mmdet/paa/paa_r50_fpn_1x_coco.py b/configs/mmdet/paa/paa_r50_fpn_1x_coco.py new file mode 100644 index 00000000..4c9c4aa7 --- /dev/null +++ b/configs/mmdet/paa/paa_r50_fpn_1x_coco.py @@ -0,0 +1,70 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='PAA', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='PAAHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + 
octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/configs/mmdet/paa/paa_r50_fpn_2x_coco.py b/configs/mmdet/paa/paa_r50_fpn_2x_coco.py new file mode 100644 index 00000000..663d2c0d --- /dev/null +++ b/configs/mmdet/paa/paa_r50_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/paa/paa_r50_fpn_mstrain_3x_coco.py b/configs/mmdet/paa/paa_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..91fa28cd --- /dev/null +++ b/configs/mmdet/paa/paa_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,20 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + 
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/pafpn/README.md b/configs/mmdet/pafpn/README.md new file mode 100644 index 00000000..4a406af6 --- /dev/null +++ b/configs/mmdet/pafpn/README.md @@ -0,0 +1,34 @@ +# PAFPN + +> [Path Aggregation Network for Instance Segmentation](https://arxiv.org/abs/1803.01534) + + + +## Abstract + +The way that information propagates in neural networks is of great importance. In this paper, we propose Path Aggregation Network (PANet) aiming at boosting information flow in proposal-based instance segmentation framework. Specifically, we enhance the entire feature hierarchy with accurate localization signals in lower layers by bottom-up path augmentation, which shortens the information path between lower layers and topmost feature. We present adaptive feature pooling, which links feature grid and all feature levels to make useful information in each feature level propagate directly to following proposal subnetworks. A complementary branch capturing different views for each proposal is created to further improve mask prediction. These improvements are simple to implement, with subtle extra computational overhead. Our PANet reaches the 1st place in the COCO 2017 Challenge Instance Segmentation task and the 2nd place in Object Detection task without large-batch training. It is also state-of-the-art on MVD and Cityscapes. + +
+ +
+ +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 4.0 | 17.2 | 37.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_20200503_105836.log.json) | + +## Citation + +```latex +@inproceedings{liu2018path, + author = {Shu Liu and + Lu Qi and + Haifang Qin and + Jianping Shi and + Jiaya Jia}, + title = {Path Aggregation Network for Instance Segmentation}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2018} +} +``` diff --git a/configs/mmdet/pafpn/faster_rcnn_r50_pafpn_1x_coco.py b/configs/mmdet/pafpn/faster_rcnn_r50_pafpn_1x_coco.py new file mode 100644 index 00000000..b2fdef91 --- /dev/null +++ b/configs/mmdet/pafpn/faster_rcnn_r50_pafpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + neck=dict( + type='PAFPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/pafpn/metafile.yml b/configs/mmdet/pafpn/metafile.yml new file mode 100644 index 00000000..f9cf97c8 --- /dev/null +++ b/configs/mmdet/pafpn/metafile.yml @@ -0,0 +1,38 @@ +Collections: + - Name: PAFPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PAFPN + Paper: + URL: https://arxiv.org/abs/1803.01534 + Title: 'Path Aggregation Network for Instance Segmentation' + README: 
configs/pafpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/necks/pafpn.py#L11 + Version: v2.0.0 + +Models: + - Name: faster_rcnn_r50_pafpn_1x_coco + In Collection: PAFPN + Config: configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 58.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth diff --git a/configs/mmdet/panoptic_fpn/README.md b/configs/mmdet/panoptic_fpn/README.md new file mode 100644 index 00000000..bc89293e --- /dev/null +++ b/configs/mmdet/panoptic_fpn/README.md @@ -0,0 +1,62 @@ +# Panoptic FPN + +> [Panoptic feature pyramid networks](https://arxiv.org/abs/1901.02446) + + + +## Abstract + +The recently introduced panoptic segmentation task has renewed our community's interest in unifying the tasks of instance segmentation (for thing classes) and semantic segmentation (for stuff classes). However, current state-of-the-art methods for this joint task use separate and dissimilar networks for instance and semantic segmentation, without performing any shared computation. In this work, we aim to unify these methods at the architectural level, designing a single network for both tasks. Our approach is to endow Mask R-CNN, a popular instance segmentation method, with a semantic segmentation branch using a shared Feature Pyramid Network (FPN) backbone. Surprisingly, this simple baseline not only remains effective for instance segmentation, but also yields a lightweight, top-performing method for semantic segmentation. 
In this work, we perform a detailed study of this minimally extended version of Mask R-CNN with FPN, which we refer to as Panoptic FPN, and show it is a robust and accurate baseline for both tasks. Given its effectiveness and conceptual simplicity, we hope our method can serve as a strong baseline and aid future research in panoptic segmentation. + +
+ +
+ +## Dataset + +PanopticFPN requires COCO and [COCO-panoptic](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip) dataset for training and evaluation. You need to download and extract it in the COCO dataset path. +The directory should be like this. + +```none +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── coco +│ │ ├── annotations +│ │ │ ├── panoptic_train2017.json +│ │ │ ├── panoptic_train2017 +│ │ │ ├── panoptic_val2017.json +│ │ │ ├── panoptic_val2017 +│ │ ├── train2017 +│ │ ├── val2017 +│ │ ├── test2017 +``` + +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | PQ | SQ | RQ | PQ_th | SQ_th | RQ_th | PQ_st | SQ_st | RQ_st | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:----:|:----:|:----:|:-----:|:-----:|:-----:|:-----:|:-----:|:-----:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 4.7 | | 40.2 | 77.8 | 49.3 | 47.8 | 80.9 | 57.5 | 28.9 | 73.1 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco/panoptic_fpn_r50_fpn_1x_coco_20210821_101153-9668fd13.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco/panoptic_fpn_r50_fpn_1x_coco_20210821_101153.log.json) | +| R-50-FPN | pytorch | 3x | - | - | 42.5 | 78.1 | 51.7 | 50.3 | 81.5 | 60.3 | 30.7 | 73.0 | 38.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco/panoptic_fpn_r50_fpn_mstrain_3x_coco_20210824_171155-5650f98b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco/panoptic_fpn_r50_fpn_mstrain_3x_coco_20210824_171155.log.json) | +| 
R-101-FPN | pytorch | 1x | 6.7 | | 42.2 | 78.3 | 51.4 | 50.1 | 81.4 | 59.9 | 30.3 | 73.6 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco/panoptic_fpn_r101_fpn_1x_coco_20210820_193950-ab9157a2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco/panoptic_fpn_r101_fpn_1x_coco_20210820_193950.log.json) | +| R-101-FPN | pytorch | 3x | - | - | 44.1 | 78.9 | 53.6 | 52.1 | 81.7 | 62.3 | 32.0 | 74.6 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco/panoptic_fpn_r101_fpn_mstrain_3x_coco_20210823_114712-9c99acc4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco/panoptic_fpn_r101_fpn_mstrain_3x_coco_20210823_114712.log.json) | + +## Citation + +The base method for panoptic segmentation task. 
+ +```latex +@inproceedings{kirillov2018panopticfpn, + author = { + Alexander Kirillov and + Ross Girshick and + Kaiming He and + Piotr Dollar + }, + title = {Panoptic Feature Pyramid Networks}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2019} +} +``` diff --git a/configs/mmdet/panoptic_fpn/metafile.yml b/configs/mmdet/panoptic_fpn/metafile.yml new file mode 100644 index 00000000..8c9d39dc --- /dev/null +++ b/configs/mmdet/panoptic_fpn/metafile.yml @@ -0,0 +1,70 @@ +Collections: + - Name: PanopticFPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PanopticFPN + Paper: + URL: https://arxiv.org/pdf/1901.02446 + Title: 'Panoptic feature pyramid networks' + README: configs/panoptic_fpn/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.16.0/mmdet/models/detectors/panoptic_fpn.py#L7 + Version: v2.16.0 + +Models: + - Name: panoptic_fpn_r50_fpn_1x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 12 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco/panoptic_fpn_r50_fpn_1x_coco_20210821_101153-9668fd13.pth + + - Name: panoptic_fpn_r50_fpn_mstrain_3x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 36 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco/panoptic_fpn_r50_fpn_mstrain_3x_coco_20210824_171155-5650f98b.pth + + - Name: panoptic_fpn_r101_fpn_1x_coco + In Collection: PanopticFPN + Config:
configs/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 12 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 42.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco/panoptic_fpn_r101_fpn_1x_coco_20210820_193950-ab9157a2.pth + + - Name: panoptic_fpn_r101_fpn_mstrain_3x_coco + In Collection: PanopticFPN + Config: configs/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 36 + Results: + - Task: Panoptic Segmentation + Dataset: COCO + Metrics: + PQ: 44.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco/panoptic_fpn_r101_fpn_mstrain_3x_coco_20210823_114712-9c99acc4.pth diff --git a/configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py b/configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py new file mode 100644 index 00000000..78b80798 --- /dev/null +++ b/configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './panoptic_fpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py b/configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..057e4811 --- /dev/null +++ b/configs/mmdet/panoptic_fpn/panoptic_fpn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './panoptic_fpn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py b/configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..29955246 --- /dev/null +++ b/configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_1x_coco.py @@ -0,0 
+1,33 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_panoptic.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='PanopticFPN', + semantic_head=dict( + type='PanopticFPNHead', + num_things_classes=80, + num_stuff_classes=53, + in_channels=256, + inner_channels=128, + start_level=0, + end_level=4, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True), + conv_cfg=None, + loss_seg=dict( + type='CrossEntropyLoss', ignore_index=255, loss_weight=0.5)), + panoptic_fusion_head=dict( + type='HeuristicFusionHead', + num_things_classes=80, + num_stuff_classes=53), + test_cfg=dict( + panoptic=dict( + score_thr=0.6, + max_per_img=100, + mask_thr_binary=0.5, + mask_overlap=0.5, + nms=dict(type='nms', iou_threshold=0.5, class_agnostic=True), + stuff_area_limit=4096))) + +custom_hooks = [] diff --git a/configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py b/configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..b5109353 --- /dev/null +++ b/configs/mmdet/panoptic_fpn/panoptic_fpn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,61 @@ +_base_ = './panoptic_fpn_r50_fpn_1x_coco.py' + +# dataset settings +dataset_type = 'CocoPanopticDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadPanopticAnnotations', + with_bbox=True, + with_mask=True, + with_seg=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 4), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + 
keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + train=dict( + _delete_=True, + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/panoptic_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'annotations/panoptic_train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/pascal_voc/README.md b/configs/mmdet/pascal_voc/README.md new file mode 100644 index 00000000..25797bcb --- /dev/null +++ b/configs/mmdet/pascal_voc/README.md @@ -0,0 +1,40 @@ +# Pascal VOC + +> [The Pascal Visual Object Classes (VOC) Challenge](https://link.springer.com/article/10.1007/s11263-009-0275-4) + + + +## Abstract + +The Pascal Visual Object Classes (VOC) challenge is a benchmark in visual object category recognition and detection, providing the vision and machine learning communities with a standard dataset of images and annotation, and standard evaluation procedures. Organised annually from 2005 to present, the challenge and its associated dataset has become accepted as the benchmark for object detection. + +This paper describes the dataset and evaluation procedure. We review the state-of-the-art in evaluated methods for both classification and detection, analyse whether the methods are statistically different, what they are learning from the images (e.g. the object or its context), and what the methods find easy or confuse. 
The paper concludes with lessons learnt in the three year history of the challenge, and proposes directions for future improvement and extension. + +
+ +
+ +## Results and Models + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN C4 | R-50 | caffe | 18k | | - | 80.9 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712_20220314_234327-847a14d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712_20220314_234327.log.json) | +| Faster R-CNN | R-50 | pytorch | 1x | 2.6 | - | 80.4 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/faster_rcnn_r50_fpn_1x_voc0712_20220320_192712-54bef0f3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/faster_rcnn_r50_fpn_1x_voc0712_20220320_192712.log.json) | +| Retinanet | R-50 | pytorch | 1x | 2.1 | - | 77.3 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200617-47cbdd0e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200616_014642.log.json) | +| SSD300 | VGG16 | - | 120e | - | - | 76.5
|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/ssd300_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/ssd300_voc0712/ssd300_voc0712_20220320_194658-17edda1b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/ssd300_voc0712/ssd300_voc0712_20220320_194658.log.json) | +| SSD512 | VGG16 | - | 120e | - | - | 79.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/ssd512_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/ssd512_voc0712/ssd512_voc0712_20220320_194717-03cefefe.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/ssd512_voc0712/ssd512_voc0712_20220320_194717.log.json) | + +## Citation + +```latex +@Article{Everingham10, + author = "Everingham, M. and Van~Gool, L. and Williams, C. K. I. and Winn, J. and Zisserman, A.", + title = "The Pascal Visual Object Classes (VOC) Challenge", + journal = "International Journal of Computer Vision", + volume = "88", + year = "2010", + number = "2", + month = jun, + pages = "303--338", +} +``` diff --git a/configs/mmdet/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712.py b/configs/mmdet/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712.py new file mode 100644 index 00000000..7bb1d736 --- /dev/null +++ b/configs/mmdet/pascal_voc/faster_rcnn_r50_caffe_c4_mstrain_18k_voc0712.py @@ -0,0 +1,81 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) + +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 512), (1333, 544), (1333, 576), + (1333, 608), (1333, 640), (1333, 
672), (1333, 704), + (1333, 736), (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=[ + data_root + 'VOC2007/ImageSets/Main/trainval.txt', + data_root + 'VOC2012/ImageSets/Main/trainval.txt' + ], + img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'], + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline)) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=100, + warmup_ratio=0.001, + step=[12000, 16000]) + +# Runner type +runner = dict(type='IterBasedRunner', max_iters=18000) + +checkpoint_config = dict(interval=3000) +evaluation = dict(interval=3000, metric='mAP') diff --git a/configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py b/configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py new file mode 100644 index 00000000..7866aceb --- /dev/null +++ 
b/configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py @@ -0,0 +1,14 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py b/configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py new file mode 100644 index 00000000..12eee2c1 --- /dev/null +++ b/configs/mmdet/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py @@ -0,0 +1,75 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) + +CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', + 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', + 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor') + +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1000, 600), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + 
transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file='data/voc0712_trainval.json', + img_prefix='data/VOCdevkit', + pipeline=train_pipeline, + classes=CLASSES)), + val=dict( + type=dataset_type, + ann_file='data/voc07_test.json', + img_prefix='data/VOCdevkit', + pipeline=test_pipeline, + classes=CLASSES), + test=dict( + type=dataset_type, + ann_file='data/voc07_test.json', + img_prefix='data/VOCdevkit', + pipeline=test_pipeline, + classes=CLASSES)) +evaluation = dict(interval=1, metric='bbox') + +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/configs/mmdet/pascal_voc/retinanet_r50_fpn_1x_voc0712.py b/configs/mmdet/pascal_voc/retinanet_r50_fpn_1x_voc0712.py new file mode 100644 index 00000000..b4b050dd --- /dev/null +++ b/configs/mmdet/pascal_voc/retinanet_r50_fpn_1x_voc0712.py @@ -0,0 +1,14 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=20)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/configs/mmdet/pascal_voc/ssd300_voc0712.py 
b/configs/mmdet/pascal_voc/ssd300_voc0712.py new file mode 100644 index 00000000..e7008aef --- /dev/null +++ b/configs/mmdet/pascal_voc/ssd300_voc0712.py @@ -0,0 +1,74 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + num_classes=20, anchor_generator=dict(basesize_ratio_range=(0.2, + 0.9)))) +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + type='RepeatDataset', times=10, dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + 
warmup_ratio=0.001, + step=[16, 20]) +checkpoint_config = dict(interval=1) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (8 samples per GPU) +auto_scale_lr = dict(base_batch_size=64) diff --git a/configs/mmdet/pascal_voc/ssd512_voc0712.py b/configs/mmdet/pascal_voc/ssd512_voc0712.py new file mode 100644 index 00000000..f4627c2d --- /dev/null +++ b/configs/mmdet/pascal_voc/ssd512_voc0712.py @@ -0,0 +1,57 @@ +_base_ = 'ssd300_voc0712.py' +input_size = 512 +model = dict( + neck=dict( + out_channels=(512, 1024, 512, 256, 256, 256, 256), + level_strides=(2, 2, 2, 2, 1), + level_paddings=(1, 1, 1, 1, 1), + last_kernel_size=4), + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + input_size=input_size, + strides=[8, 16, 32, 64, 128, 256, 512], + basesize_ratio_range=(0.15, 0.9), + ratios=([2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2])))) +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', 
keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/configs/mmdet/pisa/README.md b/configs/mmdet/pisa/README.md new file mode 100644 index 00000000..d5329418 --- /dev/null +++ b/configs/mmdet/pisa/README.md @@ -0,0 +1,50 @@ +# PISA + +> [Prime Sample Attention in Object Detection](https://arxiv.org/abs/1904.04821) + + + +## Abstract + +It is a common paradigm in object detection frameworks to treat all samples equally and target at maximizing the performance on average. In this work, we revisit this paradigm through a careful study on how different samples contribute to the overall performance measured in terms of mAP. Our study suggests that the samples in each mini-batch are neither independent nor equally important, and therefore a better classifier on average does not necessarily mean higher mAP. Motivated by this study, we propose the notion of Prime Samples, those that play a key role in driving the detection performance. We further develop a simple yet effective sampling and learning strategy called PrIme Sample Attention (PISA) that directs the focus of the training process towards such samples. Our experiments demonstrate that it is often more effective to focus on prime samples than hard samples when training a detector. Particularly, On the MSCOCO dataset, PISA outperforms the random sampling baseline and hard mining schemes, e.g., OHEM and Focal Loss, consistently by around 2% on both single-stage and two-stage detectors, even with a strong backbone ResNeXt-101. + +
+ +
+ +## Results and Models + +| PISA | Network | Backbone | Lr schd | box AP | mask AP | Config | Download | +|:----:|:-------:|:-------------------:|:-------:|:------:|:-------:|:------:|:--------:| +| × | Faster R-CNN | R-50-FPN | 1x | 36.4 | | - | +| √ | Faster R-CNN | R-50-FPN | 1x | 38.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco_20200506_185619.log.json) | +| × | Faster R-CNN | X101-32x4d-FPN | 1x | 40.1 | | - | +| √ | Faster R-CNN | X101-32x4d-FPN | 1x | 41.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco_20200505_181503.log.json) | +| × | Mask R-CNN | R-50-FPN | 1x | 37.3 | 34.2 | - | +| √ | Mask R-CNN | R-50-FPN | 1x | 39.1 | 35.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco_20200508_150500.log.json) | +| × | Mask R-CNN | X101-32x4d-FPN | 1x | 41.1 | 37.1 | - | +| √ | Mask R-CNN | X101-32x4d-FPN | 1x | | | | +| × | RetinaNet | R-50-FPN | 1x | 35.6 | | - | +| √ | RetinaNet | R-50-FPN | 1x | 36.9 | | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco_20200504_014311.log.json) | +| × | RetinaNet | X101-32x4d-FPN | 1x | 39.0 | | - | +| √ | RetinaNet | X101-32x4d-FPN | 1x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco_20200505_001404.log.json) | +| × | SSD300 | VGG16 | 1x | 25.6 | | - | +| √ | SSD300 | VGG16 | 1x | 27.6 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd300_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco_20200504_144325.log.json) | +| × | SSD512 | VGG16 | 1x | 29.3 | | - | +| √ | SSD512 | VGG16 | 1x | 31.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd512_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco_20200508_131030.log.json) | + +**Notes:** + +- In the original paper, all models are trained and tested on mmdet v1.x, thus results may not be exactly the same with this release on v2.0.
+- It is noted PISA only modifies the training pipeline so the inference time remains the same with the baseline. + +## Citation + +```latex +@inproceedings{cao2019prime, + title={Prime sample attention in object detection}, + author={Cao, Yuhang and Chen, Kai and Loy, Chen Change and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2020} +} +``` diff --git a/configs/mmdet/pisa/metafile.yml b/configs/mmdet/pisa/metafile.yml new file mode 100644 index 00000000..cd43afb0 --- /dev/null +++ b/configs/mmdet/pisa/metafile.yml @@ -0,0 +1,110 @@ +Collections: + - Name: PISA + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - PISA + - RPN + - ResNet + - RoIPool + Paper: + URL: https://arxiv.org/abs/1904.04821 + Title: 'Prime Sample Attention in Object Detection' + README: configs/pisa/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/roi_heads/pisa_roi_head.py#L8 + Version: v2.1.0 + +Models: + - Name: pisa_faster_rcnn_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth + + - Name: pisa_faster_rcnn_x101_32x4d_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth + + - Name: pisa_mask_rcnn_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py + Metadata: 
+ Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth + + - Name: pisa_retinanet_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth + + - Name: pisa_retinanet_x101_32x4d_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth + + - Name: pisa_ssd300_coco + In Collection: PISA + Config: configs/pisa/pisa_ssd300_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 27.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth + + - Name: pisa_ssd512_coco + In Collection: PISA + Config: configs/pisa/pisa_ssd512_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 31.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth diff --git a/configs/mmdet/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..71e65b0b --- /dev/null +++ b/configs/mmdet/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = 
'../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/configs/mmdet/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..16edd99d --- /dev/null +++ b/configs/mmdet/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/configs/mmdet/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py b/configs/mmdet/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 00000000..047a2934 --- /dev/null +++ b/configs/mmdet/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + 
bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/configs/mmdet/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..2186a8f6 --- /dev/null +++ b/configs/mmdet/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/configs/mmdet/pisa/pisa_retinanet_r50_fpn_1x_coco.py b/configs/mmdet/pisa/pisa_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 00000000..70f89e22 --- /dev/null +++ b/configs/mmdet/pisa/pisa_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + train_cfg=dict(isr=dict(k=2., bias=0.), 
carl=dict(k=1., bias=0.2))) diff --git a/configs/mmdet/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py b/configs/mmdet/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 00000000..b97b6720 --- /dev/null +++ b/configs/mmdet/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../retinanet/retinanet_x101_32x4d_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) diff --git a/configs/mmdet/pisa/pisa_ssd300_coco.py b/configs/mmdet/pisa/pisa_ssd300_coco.py new file mode 100644 index 00000000..b5cc0064 --- /dev/null +++ b/configs/mmdet/pisa/pisa_ssd300_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd300_coco.py' + +model = dict( + bbox_head=dict(type='PISASSDHead'), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/pisa/pisa_ssd512_coco.py b/configs/mmdet/pisa/pisa_ssd512_coco.py new file mode 100644 index 00000000..3219d6d6 --- /dev/null +++ b/configs/mmdet/pisa/pisa_ssd512_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd512_coco.py' + +model = dict( + bbox_head=dict(type='PISASSDHead'), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/point_rend/README.md b/configs/mmdet/point_rend/README.md new file mode 100644 index 00000000..a55560af --- /dev/null +++ b/configs/mmdet/point_rend/README.md @@ -0,0 +1,33 @@ +# PointRend + +> [PointRend: Image Segmentation as Rendering](https://arxiv.org/abs/1912.08193) + + + +## Abstract + +We present a new method for efficient high-quality image segmentation of objects and scenes. 
By analogizing classical computer graphics methods for efficient rendering with over- and undersampling challenges faced in pixel labeling tasks, we develop a unique perspective of image segmentation as a rendering problem. From this vantage, we present the PointRend (Point-based Rendering) neural network module: a module that performs point-based segmentation predictions at adaptively selected locations based on an iterative subdivision algorithm. PointRend can be flexibly applied to both instance and semantic segmentation tasks by building on top of existing state-of-the-art models. While many concrete implementations of the general idea are possible, we show that a simple design already achieves excellent results. Qualitatively, PointRend outputs crisp object boundaries in regions that are over-smoothed by previous methods. Quantitatively, PointRend yields significant gains on COCO and Cityscapes, for both instance and semantic segmentation. PointRend's efficiency enables output resolutions that are otherwise impractical in terms of memory or computation compared to existing approaches. + +
+ +
 + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.6 | | 38.4 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco_20200612_161407.log.json) | +| R-50-FPN | caffe | 3x | 4.6 | | 41.0 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco_20200614_002632.log.json) | + +Note: All models are trained with multi-scale; the input image's shorter side is randomly scaled to one of (640, 672, 704, 736, 768, 800). 
+ +## Citation + +```latex +@InProceedings{kirillov2019pointrend, + title={{PointRend}: Image Segmentation as Rendering}, + author={Alexander Kirillov and Yuxin Wu and Kaiming He and Ross Girshick}, + journal={ArXiv:1912.08193}, + year={2019} +} +``` diff --git a/configs/mmdet/point_rend/metafile.yml b/configs/mmdet/point_rend/metafile.yml new file mode 100644 index 00000000..82aea05b --- /dev/null +++ b/configs/mmdet/point_rend/metafile.yml @@ -0,0 +1,54 @@ +Collections: + - Name: PointRend + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PointRend + - FPN + - ResNet + Paper: + URL: https://arxiv.org/abs/1912.08193 + Title: 'PointRend: Image Segmentation as Rendering' + README: configs/point_rend/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.2.0/mmdet/models/detectors/point_rend.py#L6 + Version: v2.2.0 + +Models: + - Name: point_rend_r50_caffe_fpn_mstrain_1x_coco + In Collection: PointRend + Config: configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth + + - Name: point_rend_r50_caffe_fpn_mstrain_3x_coco + In Collection: PointRend + Config: configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth diff --git a/configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py b/configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 00000000..0c0e563d --- /dev/null +++ b/configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,44 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# model settings +model = dict( + type='PointRend', + roi_head=dict( + type='PointRendRoIHead', + mask_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='concat', + roi_layer=dict( + _delete_=True, type='SimpleRoIAlign', output_size=14), + out_channels=256, + featmap_strides=[4]), + mask_head=dict( + _delete_=True, + type='CoarseMaskHead', + num_fcs=2, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + point_head=dict( + type='MaskPointHead', + num_fcs=3, + in_channels=256, + fc_channels=256, + num_classes=80, + coarse_pred_each_layer=True, + loss_point=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + mask_size=7, + num_points=14 * 14, + oversample_ratio=3, + importance_sample_ratio=0.75)), + test_cfg=dict( + rcnn=dict( + subdivision_steps=5, + subdivision_num_points=28 * 28, + scale_factor=2))) diff --git a/configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py b/configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..169278e5 --- /dev/null +++ b/configs/mmdet/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './point_rend_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = 
dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/pvt/README.md b/configs/mmdet/pvt/README.md new file mode 100644 index 00000000..25528d0d --- /dev/null +++ b/configs/mmdet/pvt/README.md @@ -0,0 +1,57 @@ +# PVT + +> [Pyramid vision transformer: A versatile backbone for dense prediction without convolutions](https://arxiv.org/abs/2102.12122) + + + +## Abstract + +Although using convolutional neural networks (CNNs) as backbones achieves great successes in computer vision, this work investigates a simple backbone network useful for many dense prediction tasks without convolutions. Unlike the recently-proposed Transformer model (e.g., ViT) that is specially designed for image classification, we propose Pyramid Vision Transformer~(PVT), which overcomes the difficulties of porting Transformer to various dense prediction tasks. PVT has several merits compared to prior arts. (1) Different from ViT that typically has low-resolution outputs and high computational and memory cost, PVT can be not only trained on dense partitions of the image to achieve high output resolution, which is important for dense predictions but also using a progressive shrinking pyramid to reduce computations of large feature maps. (2) PVT inherits the advantages from both CNN and Transformer, making it a unified backbone in various vision tasks without convolutions by simply replacing CNN backbones. (3) We validate PVT by conducting extensive experiments, showing that it boosts the performance of many downstream tasks, e.g., object detection, semantic, and instance segmentation. For example, with a comparable number of parameters, RetinaNet+PVT achieves 40.4 AP on the COCO dataset, surpassing RetinNet+ResNet50 (36.3 AP) by 4.1 absolute AP. We hope PVT could serve as an alternative and useful backbone for pixel-level predictions and facilitate future researches. + +Transformer recently has shown encouraging progresses in computer vision. 
In this work, we present new baselines by improving the original Pyramid Vision Transformer (abbreviated as PVTv1) by adding three designs, including (1) overlapping patch embedding, (2) convolutional feed-forward networks, and (3) linear complexity attention layers. +With these modifications, our PVTv2 significantly improves PVTv1 on three tasks e.g., classification, detection, and segmentation. Moreover, PVTv2 achieves comparable or better performances than recent works such as Swin Transformer. We hope this work will facilitate state-of-the-art Transformer researches in computer vision. + +
+ +
+ +## Results and Models + +### RetinaNet (PVTv1) + +| Backbone | Lr schd | Mem (GB) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------:|:------:|:--------:| +| PVT-Tiny | 12e |8.5 |36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_t_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-t_fpn_1x_coco/retinanet_pvt-t_fpn_1x_coco_20210831_103110-17b566bd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-t_fpn_1x_coco/retinanet_pvt-t_fpn_1x_coco_20210831_103110.log.json) | +| PVT-Small | 12e |14.5 |40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_s_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-s_fpn_1x_coco/retinanet_pvt-s_fpn_1x_coco_20210906_142921-b6c94a5b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-s_fpn_1x_coco/retinanet_pvt-s_fpn_1x_coco_20210906_142921.log.json) | +| PVT-Medium | 12e |20.9 |41.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_m_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-m_fpn_1x_coco/retinanet_pvt-m_fpn_1x_coco_20210831_103243-55effa1b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-m_fpn_1x_coco/retinanet_pvt-m_fpn_1x_coco_20210831_103243.log.json) | + +### RetinaNet (PVTv2) + +| Backbone | Lr schd | Mem (GB) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------:|:------:|:--------:| +| PVTv2-B0 | 12e |7.4 |37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b0_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b0_fpn_1x_coco/retinanet_pvtv2-b0_fpn_1x_coco_20210831_103157-13e9aabe.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b0_fpn_1x_coco/retinanet_pvtv2-b0_fpn_1x_coco_20210831_103157.log.json) | +| PVTv2-B1 | 12e |9.5 |41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b1_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b1_fpn_1x_coco/retinanet_pvtv2-b1_fpn_1x_coco_20210831_103318-7e169a7d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b1_fpn_1x_coco/retinanet_pvtv2-b1_fpn_1x_coco_20210831_103318.log.json) | +| PVTv2-B2 | 12e |16.2 |44.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b2_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b2_fpn_1x_coco/retinanet_pvtv2-b2_fpn_1x_coco_20210901_174843-529f0b9a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b2_fpn_1x_coco/retinanet_pvtv2-b2_fpn_1x_coco_20210901_174843.log.json) | +| PVTv2-B3 | 12e |23.0 |46.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b3_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b3_fpn_1x_coco/retinanet_pvtv2-b3_fpn_1x_coco_20210903_151512-8357deff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b3_fpn_1x_coco/retinanet_pvtv2-b3_fpn_1x_coco_20210903_151512.log.json) | +| PVTv2-B4 | 12e |17.0 |46.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b4_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b4_fpn_1x_coco/retinanet_pvtv2-b4_fpn_1x_coco_20210901_170151-83795c86.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b4_fpn_1x_coco/retinanet_pvtv2-b4_fpn_1x_coco_20210901_170151.log.json) | +| PVTv2-B5 | 12e |18.7 |46.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pvt/retinanet_pvt_v2_b5_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b5_fpn_1x_coco/retinanet_pvtv2-b5_fpn_1x_coco_20210902_201800-3420eb57.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b5_fpn_1x_coco/retinanet_pvtv2-b5_fpn_1x_coco_20210902_201800.log.json) | + +## Citation + +```latex +@article{wang2021pyramid, + title={Pyramid vision transformer: A versatile backbone for dense prediction without convolutions}, + author={Wang, Wenhai and Xie, Enze and Li, Xiang and Fan, Deng-Ping and Song, Kaitao and Liang, Ding and Lu, Tong and Luo, Ping and Shao, Ling}, + journal={arXiv preprint arXiv:2102.12122}, + year={2021} +} +``` + +```latex +@article{wang2021pvtv2, + title={PVTv2: Improved Baselines with Pyramid Vision Transformer}, + author={Wang, Wenhai and Xie, Enze and Li, Xiang and Fan, Deng-Ping and Song, Kaitao and Liang, Ding and Lu, Tong and Luo, Ping and Shao, Ling}, + journal={arXiv preprint arXiv:2106.13797}, + year={2021} +} +``` diff --git a/configs/mmdet/pvt/metafile.yml b/configs/mmdet/pvt/metafile.yml new file mode 100644 index 00000000..58843784 --- /dev/null +++ b/configs/mmdet/pvt/metafile.yml @@ -0,0 +1,243 @@ +Models: + - Name: retinanet_pvt-t_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvt-t_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.5 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformer + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-t_fpn_1x_coco/retinanet_pvt-t_fpn_1x_coco_20210831_103110-17b566bd.pth + Paper: + URL: https://arxiv.org/abs/2102.12122 + Title: "Pyramid Vision Transformer: A Versatile Backbone for Dense 
Prediction without Convolutions" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L315 + Version: 2.17.0 + + - Name: retinanet_pvt-s_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvt-s_fpn_1x_coco.py + Metadata: + Training Memory (GB): 14.5 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformer + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-s_fpn_1x_coco/retinanet_pvt-s_fpn_1x_coco_20210906_142921-b6c94a5b.pth + Paper: + URL: https://arxiv.org/abs/2102.12122 + Title: "Pyramid Vision Transformer: A Versatile Backbone for Dense Prediction without Convolutions" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L315 + Version: 2.17.0 + + - Name: retinanet_pvt-m_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvt-m_fpn_1x_coco.py + Metadata: + Training Memory (GB): 20.9 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformer + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvt-m_fpn_1x_coco/retinanet_pvt-m_fpn_1x_coco_20210831_103243-55effa1b.pth + Paper: + URL: https://arxiv.org/abs/2102.12122 + Title: "Pyramid Vision Transformer: A Versatile Backbone for Dense Prediction without Convolutions" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L315 + Version: 2.17.0 + + - Name: retinanet_pvtv2-b0_fpn_1x_coco + In 
Collection: RetinaNet + Config: configs/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformerV2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b0_fpn_1x_coco/retinanet_pvtv2-b0_fpn_1x_coco_20210831_103157-13e9aabe.pth + Paper: + URL: https://arxiv.org/abs/2106.13797 + Title: "PVTv2: Improved Baselines with Pyramid Vision Transformer" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L543 + Version: 2.17.0 + + - Name: retinanet_pvtv2-b1_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.5 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformerV2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b1_fpn_1x_coco/retinanet_pvtv2-b1_fpn_1x_coco_20210831_103318-7e169a7d.pth + Paper: + URL: https://arxiv.org/abs/2106.13797 + Title: "PVTv2: Improved Baselines with Pyramid Vision Transformer" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L543 + Version: 2.17.0 + + - Name: retinanet_pvtv2-b2_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py + Metadata: + Training Memory (GB): 16.2 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - 
PyramidVisionTransformerV2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b2_fpn_1x_coco/retinanet_pvtv2-b2_fpn_1x_coco_20210901_174843-529f0b9a.pth + Paper: + URL: https://arxiv.org/abs/2106.13797 + Title: "PVTv2: Improved Baselines with Pyramid Vision Transformer" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L543 + Version: 2.17.0 + + - Name: retinanet_pvtv2-b3_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py + Metadata: + Training Memory (GB): 23.0 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformerV2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b3_fpn_1x_coco/retinanet_pvtv2-b3_fpn_1x_coco_20210903_151512-8357deff.pth + Paper: + URL: https://arxiv.org/abs/2106.13797 + Title: "PVTv2: Improved Baselines with Pyramid Vision Transformer" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L543 + Version: 2.17.0 + + - Name: retinanet_pvtv2-b4_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py + Metadata: + Training Memory (GB): 17.0 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformerV2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b4_fpn_1x_coco/retinanet_pvtv2-b4_fpn_1x_coco_20210901_170151-83795c86.pth + Paper: + URL: 
https://arxiv.org/abs/2106.13797 + Title: "PVTv2: Improved Baselines with Pyramid Vision Transformer" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L543 + Version: 2.17.0 + + - Name: retinanet_pvtv2-b5_fpn_1x_coco + In Collection: RetinaNet + Config: configs/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py + Metadata: + Training Memory (GB): 18.7 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x NVIDIA V100 GPUs + Architecture: + - PyramidVisionTransformerV2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pvt/retinanet_pvtv2-b5_fpn_1x_coco/retinanet_pvtv2-b5_fpn_1x_coco_20210902_201800-3420eb57.pth + Paper: + URL: https://arxiv.org/abs/2106.13797 + Title: "PVTv2: Improved Baselines with Pyramid Vision Transformer" + README: configs/pvt/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.17.0/mmdet/models/backbones/pvt.py#L543 + Version: 2.17.0 diff --git a/configs/mmdet/pvt/retinanet_pvt-l_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvt-l_fpn_1x_coco.py new file mode 100644 index 00000000..e299f2a0 --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvt-l_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'retinanet_pvt-t_fpn_1x_coco.py' +model = dict( + backbone=dict( + num_layers=[3, 8, 27, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_large.pth'))) +fp16 = dict(loss_scale=dict(init_scale=512)) diff --git a/configs/mmdet/pvt/retinanet_pvt-m_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvt-m_fpn_1x_coco.py new file mode 100644 index 00000000..b888f788 --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvt-m_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = 'retinanet_pvt-t_fpn_1x_coco.py' +model = dict( + backbone=dict( + num_layers=[3, 4, 18, 3], + 
init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_medium.pth'))) diff --git a/configs/mmdet/pvt/retinanet_pvt-s_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvt-s_fpn_1x_coco.py new file mode 100644 index 00000000..46603488 --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvt-s_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = 'retinanet_pvt-t_fpn_1x_coco.py' +model = dict( + backbone=dict( + num_layers=[3, 4, 6, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_small.pth'))) diff --git a/configs/mmdet/pvt/retinanet_pvt-t_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvt-t_fpn_1x_coco.py new file mode 100644 index 00000000..a6cff7d0 --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvt-t_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RetinaNet', + backbone=dict( + _delete_=True, + type='PyramidVisionTransformer', + num_layers=[2, 2, 2, 2], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_tiny.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) +# optimizer +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, weight_decay=0.0001) diff --git a/configs/mmdet/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py new file mode 100644 index 00000000..cbe2295d --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvtv2-b0_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RetinaNet', + backbone=dict( + _delete_=True, + type='PyramidVisionTransformerV2', + embed_dims=32, + num_layers=[2, 2, 2, 2], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 
'releases/download/v2/pvt_v2_b0.pth')), + neck=dict(in_channels=[32, 64, 160, 256])) +# optimizer +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, weight_decay=0.0001) diff --git a/configs/mmdet/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py new file mode 100644 index 00000000..5374c509 --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvtv2-b1_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b1.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) diff --git a/configs/mmdet/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py new file mode 100644 index 00000000..cf9a18de --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvtv2-b2_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 4, 6, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b2.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) diff --git a/configs/mmdet/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py new file mode 100644 index 00000000..7a47f820 --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvtv2-b3_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 4, 18, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b3.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) diff --git a/configs/mmdet/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py new file mode 100644 index 00000000..9891d7bd --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvtv2-b4_fpn_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = 
'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 8, 27, 3], + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b4.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) +# optimizer +optimizer = dict( + _delete_=True, type='AdamW', lr=0.0001 / 1.4, weight_decay=0.0001) +# dataset settings +data = dict(samples_per_gpu=1, workers_per_gpu=1) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (1 samples per GPU) +auto_scale_lr = dict(base_batch_size=8) diff --git a/configs/mmdet/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py b/configs/mmdet/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py new file mode 100644 index 00000000..a9fea2eb --- /dev/null +++ b/configs/mmdet/pvt/retinanet_pvtv2-b5_fpn_1x_coco.py @@ -0,0 +1,19 @@ +_base_ = 'retinanet_pvtv2-b0_fpn_1x_coco.py' +model = dict( + backbone=dict( + embed_dims=64, + num_layers=[3, 6, 40, 3], + mlp_ratios=(4, 4, 4, 4), + init_cfg=dict(checkpoint='https://github.com/whai362/PVT/' + 'releases/download/v2/pvt_v2_b5.pth')), + neck=dict(in_channels=[64, 128, 320, 512])) +# optimizer +optimizer = dict( + _delete_=True, type='AdamW', lr=0.0001 / 1.4, weight_decay=0.0001) +# dataset settings +data = dict(samples_per_gpu=1, workers_per_gpu=1) + +# NOTE: `auto_scale_lr` is for automatically scaling LR, +# USER SHOULD NOT CHANGE ITS VALUES. +# base_batch_size = (8 GPUs) x (1 samples per GPU) +auto_scale_lr = dict(base_batch_size=8) diff --git a/configs/mmdet/queryinst/README.md b/configs/mmdet/queryinst/README.md new file mode 100644 index 00000000..c041662f --- /dev/null +++ b/configs/mmdet/queryinst/README.md @@ -0,0 +1,36 @@ +# QueryInst + +> [Instances as Queries](https://openaccess.thecvf.com/content/ICCV2021/html/Fang_Instances_As_Queries_ICCV_2021_paper.html) + + + +## Abstract + +We present QueryInst, a new perspective for instance segmentation. 
QueryInst is a multi-stage end-to-end system that treats instances of interest as learnable queries, enabling query based object detectors, e.g., Sparse R-CNN, to have strong instance segmentation performance. The attributes of instances such as categories, bounding boxes, instance masks, and instance association embeddings are represented by queries in a unified manner. In QueryInst, a query is shared by both detection and segmentation via dynamic convolutions and driven by parallelly-supervised multi-stage learning. We conduct extensive experiments on three challenging benchmarks, i.e., COCO, CityScapes, and YouTube-VIS to evaluate the effectiveness of QueryInst in object detection, instance segmentation, and video instance segmentation tasks. For the first time, we demonstrate that a simple end-to-end query based framework can achieve the state-of-the-art performance in various instance-level recognition tasks. + +
+ +
+ +## Results and Models + +| Model | Backbone | Style | Lr schd | Number of Proposals |Multi-Scale| RandomCrop | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:-------: |:-------: |:---------:|:------:|:------:|:------:|:--------:| +| QueryInst | R-50-FPN | pytorch | 1x | 100 | False | False | 42.0 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_1x_coco/queryinst_r50_fpn_1x_coco_20210907_084916-5a8f1998.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_1x_coco/queryinst_r50_fpn_1x_coco_20210907_084916.log.json) | +| QueryInst | R-50-FPN | pytorch | 3x | 100 | True | False | 44.8 | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco/queryinst_r50_fpn_mstrain_480-800_3x_coco_20210901_103643-7837af86.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco/queryinst_r50_fpn_mstrain_480-800_3x_coco_20210901_103643.log.json) | +| QueryInst | R-50-FPN | pytorch | 3x | 300 | True | True | 47.5 | 41.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_101802-85cffbd8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_101802.log.json) | +| QueryInst | 
R-101-FPN | pytorch | 3x | 100 | True | False | 46.4 | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco/queryinst_r101_fpn_mstrain_480-800_3x_coco_20210904_104048-91f9995b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco/queryinst_r101_fpn_mstrain_480-800_3x_coco_20210904_104048.log.json) | +| QueryInst | R-101-FPN | pytorch | 3x | 300 | True | True | 49.0 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_153621-76cce59f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_153621.log.json) | + +## Citation + +```latex +@InProceedings{Fang_2021_ICCV, + author = {Fang, Yuxin and Yang, Shusheng and Wang, Xinggang and Li, Yu and Fang, Chen and Shan, Ying and Feng, Bin and Liu, Wenyu}, + title = {Instances As Queries}, + booktitle = {Proceedings of the IEEE/CVF International Conference on Computer Vision (ICCV)}, + month = {October}, + year = {2021}, + pages = {6910-6919} +} +``` diff --git a/configs/mmdet/queryinst/metafile.yml b/configs/mmdet/queryinst/metafile.yml new file mode 100644 index 00000000..da7f0a72 --- /dev/null +++ b/configs/mmdet/queryinst/metafile.yml @@ -0,0 +1,100 @@ +Collections: + - Name: QueryInst + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - 
ResNet + - QueryInst + Paper: + URL: https://openaccess.thecvf.com/content/ICCV2021/papers/Fang_Instances_As_Queries_ICCV_2021_paper.pdf + Title: 'Instances as Queries' + README: configs/queryinst/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/detectors/queryinst.py + Version: v2.18.0 + +Models: + - Name: queryinst_r50_fpn_1x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_1x_coco/queryinst_r50_fpn_1x_coco_20210907_084916-5a8f1998.pth + + - Name: queryinst_r50_fpn_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco/queryinst_r50_fpn_mstrain_480-800_3x_coco_20210901_103643-7837af86.pth + + - Name: queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_101802-85cffbd8.pth + + - Name: queryinst_r101_fpn_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: 
configs/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco/queryinst_r101_fpn_mstrain_480-800_3x_coco_20210904_104048-91f9995b.pth + + - Name: queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: QueryInst + Config: configs/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20210904_153621-76cce59f.pth diff --git a/configs/mmdet/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/configs/mmdet/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 00000000..fd138f5a --- /dev/null +++ b/configs/mmdet/queryinst/queryinst_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py b/configs/mmdet/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 00000000..07cae19c --- /dev/null +++ b/configs/mmdet/queryinst/queryinst_r101_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './queryinst_r50_fpn_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + 
depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/configs/mmdet/queryinst/queryinst_r50_fpn_1x_coco.py b/configs/mmdet/queryinst/queryinst_r50_fpn_1x_coco.py new file mode 100644 index 00000000..48f5773b --- /dev/null +++ b/configs/mmdet/queryinst/queryinst_r50_fpn_1x_coco.py @@ -0,0 +1,138 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +num_stages = 6 +num_proposals = 100 +model = dict( + type='QueryInst', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=0, + add_extra_convs='on_input', + num_outs=4), + rpn_head=dict( + type='EmbeddingRPNHead', + num_proposals=num_proposals, + proposal_feature_channel=256), + roi_head=dict( + type='SparseRoIHead', + num_stages=num_stages, + stage_loss_weights=[1] * num_stages, + proposal_feature_channel=256, + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='DIIHead', + num_classes=80, + num_ffn_fcs=2, + num_heads=8, + num_cls_fcs=1, + num_reg_fcs=3, + feedforward_channels=2048, + in_channels=256, + dropout=0.0, + ffn_act_cfg=dict(type='ReLU', inplace=True), + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=7, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + 
loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=False, + target_means=[0., 0., 0., 0.], + target_stds=[0.5, 0.5, 1., 1.])) for _ in range(num_stages) + ], + mask_head=[ + dict( + type='DynamicMaskHead', + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=14, + with_proj=False, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + num_convs=4, + num_classes=80, + roi_feat_size=14, + in_channels=256, + conv_kernel_size=3, + conv_out_channels=256, + class_agnostic=False, + norm_cfg=dict(type='BN'), + upsample_cfg=dict(type='deconv', scale_factor=2), + loss_mask=dict( + type='DiceLoss', + loss_weight=8.0, + use_sigmoid=True, + activate=False, + eps=1e-5)) for _ in range(num_stages) + ]), + # training and testing settings + train_cfg=dict( + rpn=None, + rcnn=[ + dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', weight=5.0), + iou_cost=dict(type='IoUCost', iou_mode='giou', + weight=2.0)), + sampler=dict(type='PseudoSampler'), + pos_weight=1, + mask_size=28, + ) for _ in range(num_stages) + ]), + test_cfg=dict( + rpn=None, rcnn=dict(max_per_img=num_proposals, mask_thr_binary=0.5))) + +# optimizer +optimizer = dict( + _delete_=True, + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)})) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[8, 11], warmup_iters=1000) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git 
a/configs/mmdet/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/configs/mmdet/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 00000000..3089b3c6 --- /dev/null +++ b/configs/mmdet/queryinst/queryinst_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,54 @@ +_base_ = './queryinst_r50_fpn_mstrain_480-800_3x_coco.py' +num_proposals = 300 +model = dict( + rpn_head=dict(num_proposals=num_proposals), + test_cfg=dict( + _delete_=True, + rpn=None, + rcnn=dict(max_per_img=num_proposals, mask_thr_binary=0.5))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR. +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git 
a/configs/mmdet/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py b/configs/mmdet/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 00000000..89e2cd10 --- /dev/null +++ b/configs/mmdet/queryinst/queryinst_r50_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,23 @@ +_base_ = './queryinst_r50_fpn_1x_coco.py' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +min_values = (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, value) for value in min_values], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] + +data = dict(train=dict(pipeline=train_pipeline)) +lr_config = dict(policy='step', step=[27, 33]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/configs/mmdet/regnet/README.md b/configs/mmdet/regnet/README.md new file mode 100644 index 00000000..cb32d9b8 --- /dev/null +++ b/configs/mmdet/regnet/README.md @@ -0,0 +1,122 @@ +# RegNet + +> [Designing Network Design Spaces](https://arxiv.org/abs/2003.13678) + + + +## Abstract + + In this work, we present a new network design paradigm. Our goal is to help advance the understanding of network design and discover design principles that generalize across settings. Instead of focusing on designing individual network instances, we design network design spaces that parametrize populations of networks. The overall process is analogous to classic manual design of networks, but elevated to the design space level. 
Using our methodology we explore the structure aspect of network design and arrive at a low-dimensional design space consisting of simple, regular networks that we call RegNet. The core insight of the RegNet parametrization is surprisingly simple: widths and depths of good networks can be explained by a quantized linear function. We analyze the RegNet design space and arrive at interesting findings that do not match the current practice of network design. The RegNet design space provides simple and fast networks that work well across a wide range of flop regimes. Under comparable training settings and flops, the RegNet models outperform the popular EfficientNet models while being up to 5x faster on GPUs. + +
+ +
+ +## Introduction + +We implement RegNetX and RegNetY models in detection systems and provide their first results on Mask R-CNN, Faster R-CNN and RetinaNet. + +The pre-trained models are converted from [model zoo of pycls](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md). + +## Usage + +To use a regnet model, there are two steps to do: + +1. Convert the model to ResNet-style supported by MMDetection +2. Modify backbone and neck in config accordingly + +### Convert model + +We already prepare models of FLOPs from 400M to 12G in our model zoo. + +For more general usage, we also provide script `regnet2mmdet.py` in the tools directory to convert the key of models pretrained by [pycls](https://github.com/facebookresearch/pycls/) to +ResNet-style checkpoints used in MMDetection. + +```bash +python -u tools/model_converters/regnet2mmdet.py ${PRETRAIN_PATH} ${STORE_PATH} +``` + +This script convert model from `PRETRAIN_PATH` and store the converted model in `STORE_PATH`. + +### Modify config + +The users can modify the config's `depth` of backbone and corresponding keys in `arch` according to the configs in the [pycls model zoo](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md). +The parameter `in_channels` in FPN can be found in the Figure 15 & 16 of the paper (`wi` in the legend). +This directory already provides some configs with their performance, using RegNetX from 800MF to 12GF level. +For other pre-trained models or self-implemented regnet models, the users are responsible to check these parameters by themselves. + +**Note**: Although Fig. 15 & 16 also provide `w0`, `wa`, `wm`, `group_w`, and `bot_mul` for `arch`, they are quantized thus inaccurate, using them sometimes produces different backbone that does not match the key in the pre-trained model. 
+ +## Results and Models + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.4 | 12.0 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +|[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | +|[RegNetX-4.0GF-FPN](./mask_rcnn_regnetx-4GF_fpn_1x_coco.py)| pytorch | 1x |5.5||41.5|37.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217.log.json) | +| [R-101-FPN](../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py)| pytorch | 1x | 6.4 | 10.3 | 40.0 | 36.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +|[RegNetX-6.4GF-FPN](./mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py)| pytorch | 1x |6.1 ||41.0|37.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439.log.json) | +| [X-101-32x4d-FPN](../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | pytorch | 1x | 7.6 | 9.4 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +|[RegNetX-8.0GF-FPN](./mask_rcnn_regnetx-8GF_fpn_1x_coco.py)| pytorch | 1x |6.4 ||41.7|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515.log.json) | +|[RegNetX-12GF-FPN](./mask_rcnn_regnetx-12GF_fpn_1x_coco.py)| pytorch | 1x |7.4 ||42.2|38|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552.log.json) | +|[RegNetX-3.2GF-FPN-DCN-C3-C5](./mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726.log.json) | + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.0 | 18.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x | 4.5||39.9|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py)| pytorch | 2x | 4.5||41.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955.log.json) | + +### RetinaNet + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../retinanet/retinanet_r50_fpn_1x_coco.py) | pytorch | 1x | 3.8 | 16.6 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | 
+|[RegNetX-800MF-FPN](./retinanet_regnetx-800MF_fpn_1x_coco.py)| pytorch | 1x |2.5||35.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-1.6GF-FPN](./retinanet_regnetx-1.6GF_fpn_1x_coco.py)| pytorch | 1x |3.3||37.3|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-3.2GF-FPN](./retinanet_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |4.2 ||39.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | + +### Pre-trained models + +We also train some models with longer schedules and multi-scale training. Users can fine-tune them for downstream tasks. 
+ +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-----: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|Faster RCNN |[RegNetX-400MF-FPN](./faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |2.3 ||37.1|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112-e1967c37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112.log.json) | +|Faster RCNN |[RegNetX-800MF-FPN](./faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |2.8 ||38.8|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118-a2c70b20.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118.log.json) | +|Faster RCNN |[RegNetX-1.6GF-FPN](./faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |3.4 ||40.5|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325-94aa46cc.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325.log.json) | +|Faster RCNN |[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.4 ||42.3|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152-e16a5227.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152.log.json) | +|Faster RCNN |[RegNetX-4GF-FPN](./faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.9 ||42.8|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201-65eaf841.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201.log.json) | +|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.0 ||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) | +|Mask RCNN 
|[RegNetX-400MF-FPN](./mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |2.5 ||37.6|34.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443-8aac57a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443.log.json) | +|Mask RCNN |[RegNetX-800MF-FPN](./mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |2.9 ||39.5|36.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641-715d51f5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641.log.json) | +|Mask RCNN |[RegNetX-1.6GF-FPN](./mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |3.6 ||40.9|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6764cff5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641.log.json) | +|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | pytorch | 3x |5.0 
||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) | +|Mask RCNN |[RegNetX-4GF-FPN](./mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x |5.1 ||43.4|39.2|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621-00f0331c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621.log.json) | +|Cascade Mask RCNN |[RegNetX-400MF-FPN](./cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.3||41.6|36.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210715_211619-5142f449.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210715_211619.log.json) | +|Cascade Mask RCNN |[RegNetX-800MF-FPN](./cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py)| pytorch | 3x 
|4.8||42.8|37.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210715_211616-dcbd13f4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210715_211616.log.json) | +|Cascade Mask RCNN |[RegNetX-1.6GF-FPN](./cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.4||44.5|39.0|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-1_20210715_211616-75f29a61.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-1_20210715_211616.log.json) | +|Cascade Mask RCNN |[RegNetX-3.2GF-FPN](./cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |6.4||45.8|40.0|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-3_20210715_211616-b9c2c58b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-3_20210715_211616.log.json) | +|Cascade Mask RCNN |[RegNetX-4GF-FPN](./cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | pytorch | 3x 
|6.9||45.8|40.0|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210715_212034-cbb1be4c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210715_212034.log.json) | + +### Notice + +1. The models are trained using a different weight decay, i.e., `weight_decay=5e-5`, following the setting used in ImageNet training. This brings an improvement of at least 0.7 AP absolute but does not improve the model using ResNet-50. +2. RetinaNets using RegNets are trained with a learning rate of 0.02 and gradient clipping. We find that using a learning rate of 0.02 improves the results by at least 0.7 AP absolute, and gradient clipping is necessary to stabilize the training. However, this does not improve the performance of ResNet-50-FPN RetinaNet. 
+ +## Citation + +```latex +@article{radosavovic2020designing, + title={Designing Network Design Spaces}, + author={Ilija Radosavovic and Raj Prateek Kosaraju and Ross Girshick and Kaiming He and Piotr Dollár}, + year={2020}, + eprint={2003.13678}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` diff --git a/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..358d85aa --- /dev/null +++ b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..84645718 --- /dev/null +++ b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,63 @@ +_base_ = [ + '../common/mstrain_3x_coco_instance.py', + '../_base_/models/cascade_mask_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets 
+ mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + # Images are converted to float32 directly after loading in PyCls + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict(weight_decay=0.00005) diff --git a/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..2a8990a6 --- /dev/null +++ b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) diff --git 
a/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..31578634 --- /dev/null +++ b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..41376ad8 --- /dev/null +++ b/configs/mmdet/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..385b5ca7 --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 
2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 00000000..88d270e3 --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py new file mode 100644 index 00000000..612490b4 --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..b7e6e1a3 --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,61 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', 
size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict(weight_decay=0.00005) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..0a05f6e4 --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..98b3fc2b --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + 
style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..67f448bd --- /dev/null +++ b/configs/mmdet/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..7970c3c8 --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git 
a/configs/mmdet/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py new file mode 100644 index 00000000..ce3661cf --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_12gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_12gf')), + neck=dict( + type='FPN', + in_channels=[224, 448, 896, 2240], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 00000000..44bf0d11 --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + # Images are converted to float32 directly after loading in PyCls + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', 
flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 00000000..5b534281 --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf'))) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 00000000..aca64d33 --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,66 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + 
norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..c38dfa6a --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py 
@@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py new file mode 100644 index 00000000..874d485b --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..f0b65eab --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + 
norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py new file mode 100644 index 00000000..99387d86 --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_6.4gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_6.4gf')), + neck=dict( + type='FPN', + in_channels=[168, 392, 784, 1624], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 00000000..335ebabf --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,26 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) 
+optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py b/configs/mmdet/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py new file mode 100644 index 00000000..1e7832ff --- /dev/null +++ b/configs/mmdet/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_8.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_8.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 720, 1920], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/metafile.yml b/configs/mmdet/regnet/metafile.yml new file mode 100644 index 00000000..ecd39531 --- /dev/null +++ b/configs/mmdet/regnet/metafile.yml @@ -0,0 +1,797 @@ +Models: + - Name: mask_rcnn_regnetx-3.2GF_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-4GF_fpn_1x_coco + In Collection: Mask R-CNN + Config: 
configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-6.4GF_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.1 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-8GF_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight 
Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-12GF_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: 
Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-3.2GF_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-3.2GF_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 24 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth + Paper: + URL: 
https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: retinanet_regnetx-800MF_fpn_1x_coco + In Collection: RetinaNet + Config: configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 2.5 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: retinanet_regnetx-1.6GF_fpn_1x_coco + In Collection: RetinaNet + Config: configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.3 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: retinanet_regnetx-3.2GF_fpn_1x_coco + In Collection: RetinaNet + Config: 
configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + Epochs: 12 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 2.3 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112-e1967c37.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 2.8 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + 
Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118-a2c70b20.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.4 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325-94aa46cc.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.4 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152-e16a5227.pth + Paper: + URL: 
https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.9 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201-65eaf841.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: 
https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 2.5 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443-8aac57a4.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 2.9 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641-715d51f5.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: 
https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.6 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6764cff5.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6e63e19c.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: 
mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: Mask R-CNN + Config: configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.1 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621-00f0331c.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210715_211619-5142f449.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco + In Collection: 
Cascade R-CNN + Config: configs/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.8 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210715_211616-dcbd13f4.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.4 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-1_20210715_211616-75f29a61.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: 
configs/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-3_20210715_211616-b9c2c58b.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 + + - Name: cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: Cascade R-CNN + Config: configs/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.9 + Epochs: 36 + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/cascade_mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210715_212034-cbb1be4c.pth + Paper: + URL: https://arxiv.org/abs/2003.13678 + Title: 'Designing Network Design Spaces' + README: configs/regnet/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.1.0/mmdet/models/backbones/regnet.py#L11 + Version: v2.1.0 diff --git a/configs/mmdet/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py b/configs/mmdet/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py new file mode 100644 index 
00000000..7395c1bf --- /dev/null +++ b/configs/mmdet/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py b/configs/mmdet/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 00000000..f05307c4 --- /dev/null +++ b/configs/mmdet/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,59 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/configs/mmdet/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py b/configs/mmdet/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py new file mode 100644 index 00000000..f6f89893 --- /dev/null +++ b/configs/mmdet/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/configs/mmdet/reppoints/README.md b/configs/mmdet/reppoints/README.md new file mode 100644 index 00000000..205a8732 --- /dev/null +++ b/configs/mmdet/reppoints/README.md @@ -0,0 +1,59 @@ +# RepPoints + +> [RepPoints: Point Set Representation for Object Detection](https://arxiv.org/abs/1904.11490) + + + +## Abstract + +Modern object detectors rely heavily on rectangular bounding boxes, such as anchors, proposals and the final predictions, to represent objects at various recognition stages. 
The bounding box is convenient to use but provides only a coarse localization of objects and leads to a correspondingly coarse extraction of object features. In this paper, we present RepPoints(representative points), a new finer representation of objects as a set of sample points useful for both localization and recognition. Given ground truth localization and recognition targets for training, RepPoints learn to automatically arrange themselves in a manner that bounds the spatial extent of an object and indicates semantically significant local areas. They furthermore do not require the use of anchors to sample a space of bounding boxes. We show that an anchor-free object detector based on RepPoints can be as effective as the state-of-the-art anchor-based detection methods, with 46.5 AP and 67.4 AP50 on the COCO test-dev detection benchmark, using ResNet-101 model. + +
+ +
+ +## Introdution + +By [Ze Yang](https://yangze.tech/), [Shaohui Liu](http://b1ueber2y.me/), and [Han Hu](https://ancientmooner.github.io/). + +We provide code support and configuration files to reproduce the results in the paper for +["RepPoints: Point Set Representation for Object Detection"](https://arxiv.org/abs/1904.11490) on COCO object detection. + +**RepPoints**, initially described in [arXiv](https://arxiv.org/abs/1904.11490), is a new representation method for visual objects, on which visual understanding tasks are typically centered. Visual object representation, aiming at both geometric description and appearance feature extraction, is conventionally achieved by `bounding box + RoIPool (RoIAlign)`. The bounding box representation is convenient to use; however, it provides only a rectangular localization of objects that lacks geometric precision and may consequently degrade feature quality. Our new representation, RepPoints, models objects by a `point set` instead of a `bounding box`, which learns to adaptively position themselves over an object in a manner that circumscribes the object’s `spatial extent` and enables `semantically aligned feature extraction`. This richer and more flexible representation maintains the convenience of bounding boxes while facilitating various visual understanding applications. This repo demonstrated the effectiveness of RepPoints for COCO object detection. + +Another feature of this repo is the demonstration of an `anchor-free detector`, which can be as effective as state-of-the-art anchor-based detection methods. The anchor-free detector can utilize either `bounding box` or `RepPoints` as the basic object representation. + +## Results and Models + +The results on COCO 2017val are shown in the table below. 
+ +| Method | Backbone | GN | Anchor | convert func | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------:|:---:|:------:|:------------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| BBox | R-50-FPN | Y | single | - | 1x | 3.9 | 15.9 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329_145916.log.json) | +| BBox | R-50-FPN | Y | none | - | 1x | 3.9 | 15.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330_233609.log.json) | +| RepPoints | R-50-FPN | N | none | moment | 1x | 3.3 | 18.5 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330_233609.log.json) | +| RepPoints | R-50-FPN | Y | none | moment | 1x | 3.9 | 17.5 | 38.1 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329_145952.log.json) | +| RepPoints | R-50-FPN | Y | none | moment | 2x | 3.9 | - | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329_150020.log.json) | +| RepPoints | R-101-FPN | Y | none | moment | 2x | 5.8 | 13.7 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329_132205.log.json) | +| RepPoints | R-101-FPN-DCN | Y | none | moment | 2x | 5.9 | 12.1 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132134.log.json) | +| RepPoints | X-101-FPN-DCN | Y | none | moment | 2x | 7.1 | 9.3 | 44.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132201.log.json) | + +**Notes:** + +- `R-xx`, `X-xx` denote the ResNet and ResNeXt architectures, respectively. +- `DCN` denotes replacing 3x3 conv with the 3x3 deformable convolution in `c3-c5` stages of backbone. +- `none` in the `anchor` column means 2-d `center point` (x,y) is used to represent the initial object hypothesis. `single` denotes one 4-d anchor box (x,y,w,h) with IoU based label assign criterion is adopted. +- `moment`, `partial MinMax`, `MinMax` in the `convert func` column are three functions to convert a point set to a pseudo box. +- Note the results here are slightly different from those reported in the paper, due to framework change. While the original paper uses an [MXNet](https://mxnet.apache.org/) implementation, we re-implement the method in [PyTorch](https://pytorch.org/) based on mmdetection. 
+ +## Citation + +```latex +@inproceedings{yang2019reppoints, + title={RepPoints: Point Set Representation for Object Detection}, + author={Yang, Ze and Liu, Shaohui and Hu, Han and Wang, Liwei and Lin, Stephen}, + booktitle={The IEEE International Conference on Computer Vision (ICCV)}, + month={Oct}, + year={2019} +} +``` diff --git a/configs/mmdet/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py b/configs/mmdet/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 00000000..b24c8db7 --- /dev/null +++ b/configs/mmdet/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax', use_grid_points=True)) diff --git a/configs/mmdet/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py b/configs/mmdet/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 00000000..8d5013d3 --- /dev/null +++ b/configs/mmdet/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict( + bbox_head=dict(transform_method='minmax', use_grid_points=True), + # training and testing settings + train_cfg=dict( + init=dict( + assigner=dict( + _delete_=True, + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1)))) diff --git a/configs/mmdet/reppoints/metafile.yml b/configs/mmdet/reppoints/metafile.yml new file mode 100644 index 00000000..cd4312c4 --- /dev/null +++ b/configs/mmdet/reppoints/metafile.yml @@ -0,0 +1,181 @@ +Collections: + - Name: RepPoints + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + - FPN + - RepPoints + - ResNet + Paper: + URL: https://arxiv.org/abs/1904.11490 + Title: 'RepPoints: Point Set Representation for Object Detection' + 
README: configs/reppoints/README.md + Code: + URL: https://github.com/open-mmlab/mmdetection/blob/v2.0.0/mmdet/models/detectors/reppoints_detector.py#L9 + Version: v2.0.0 + +Models: + - Name: bbox_r50_grid_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 62.89 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth + + - Name: bbox_r50_grid_center_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 64.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth + + - Name: reppoints_moment_r50_fpn_1x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.3 + inference time (ms/im): + - value: 54.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth + + - Name: 
reppoints_moment_r50_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth + + - Name: reppoints_moment_r50_fpn_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth + + - Name: reppoints_moment_r101_fpn_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 5.8 + inference time (ms/im): + - value: 72.99 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth + + - Name: reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco + In Collection: RepPoints + Config: 
configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 5.9 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth + + - Name: reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 107.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth diff --git a/configs/mmdet/reppoints/reppoints.png b/configs/mmdet/reppoints/reppoints.png new file mode 100644 index 0000000000000000000000000000000000000000..a9306d9ba6c659a670822213bf198099f9e125b1 GIT binary patch literal 1198109 zcmaHT2{_d2`+nz~)+4ltlI65#8QPF#)TvMjS;jU7A=_B8Gh?*Mu^$O1%Sf{C%gh)I zQ7B^UgE1qdVa5!?h(W)1qVMm2zQ61GcU@g=6YuAJ-{*Pm=f3af6MkD?NAP#?-?wbp zB6#EaHKQ$Cc0_I2vejwlcJMdCDk(GImtTC0bgpbE?vR)Qf7t5ur{14iwv@#RtlrrM z{=Dnnbqk*@TO`ByKYxjxn7g}W%lE7s*ZwpKw4SEz2z<^6VXV15(^DRtK3S9JsZ6R( znf7R{sTn+8qnaWirq-;?AxThMXxZU(v{Lp9h1+7kKAa>i(YIZH@b?}jN$fj)dF1>t z?JN79MU_2W4k>2sItZQr`0;$Xs#)F2$P(Ga#N@BX|DPYn7Gl!(f^}~G_~ZEAs9*TM zexq8Wo9e=q!7!@(b)UA1L0nJ_EUJl2cl>KCw=U-Ml@%zpgK z|9<2TS0>X>onT(0pXK_Yd3?xsA;LSdHQ0u~Fme8;#NYrph)^LXzc%m73{-@u(y?lm$is1hxXpNqi 
zc_Ciw9Eo}o%1W5?vP6G+7I9M!aqGtiG&KIRQga~%g#wddP6Y~MS6dwxiu~5iR!i~L z>BITG65}uP&0DRXUo~pm2#WmOC6CwNM`3we#xxfWVLpD@rRzJfQ zyzs6kRd^TRJ~3mv_E5kn!`Q>t3|MFjzWR+YZKB8SIz>)PA*`ON;V7crD6NAQNh;SACr z{IByVv zRpzT>{~T;$Yv!zud1ps%Y}U2Svyn7~!%GBI%9WrV^;l`i%od)a`(Mq_?IoSFcd34p zSCYZqvC>%ou%N_Lb|!9D7>*XHo20ao=TREunMjzG^~9K$E|bYh-~`?3 z<(@}*qFz-CS<&}IyL*mfubNh2qR~%^Ix7b-H;HK!<|2QMTZDIP1l5B*P8n6%c>N%) z3AKQ=7-gxt!n-N@ldQh53+Tl^Lp?R~NKVCaa`mtK-oxQ$jHC0f7F!mEzFky7Ovg5_ zzvU|7*J4WUeh$LMZ|vQr3!AGw(rWs+xS+u7Mo>cWYliIH8!i4FNbj&J5NmliIW-!G zJ$6MgHX1h2*gh0|+=8ezkzeAex-j(Q>x<{N^!^%Pb7eznZN{C4@4h*QS?E)9zvxDh z^@;z$U;G1Up`WXFFm6idQ}UQ{Ns+?e7+cCNF)TSEM{@wzlA}#;(FoJqE)1{MYbkNa zebvg9;v#Z)@#e?A-eg~ld!@*5cdJY<`#w2E?C(559oB5t%1ZEvb>M%8H^PrMp6Bw; z@=5fZBmO8Mz5k`Ba>2~|(_#_QOA?6f3iGQWF9e#_-}B7tvru<#MZF5Z8tUJ9!Gy~; zONl=4WQZa;$!}5pKD0OH)1>}=DkO~)jwKydZ429-N9;GUuGr7V7)z5c?rTc1NG5Rq zqGQV~p^c5S12zN5;N)Eq2I)CEZ_Wo>FUC~GM-g0&c!~mmdgSu+V`j`jSU-q zmOP}ER{JcYArha<_;imn8iXwkg)eY=`rU{ ziH@fI&LNo0=o$1D8|TTESqJ(oE51Kc|2`K&$3)qr>?w&3i-#E*PIMq! 
zv8N{d>rQHO`i(&?ST^j6pnGY*2s%-nnpkvbj<=Y~Aj@m=GRq)l(BLV5$-2O{ZAkv6 zym@$dkjgU{7HMn-1rEgW^Ho+6sN>FO!c?+rr< zdQ!ZrUN4h}P?AJn-SiL=U2hxwOo*3TRKHZ!>t|oTJobY56{TEWckZ_&1eCGEfwq8^^^;mNeZ*dMk8&bdB$HVWuJWLG zd0;`cpcSW)BVhk3q3Zjil1r9ir^?IOSKo%xI!rS=k$8n{F(k%os!!X@9f5TjMvoiiWD} zV!GDMF9OO$8Ao^>>M;W4{*<4OP|I{aV8y9IO^_S?TU0V^m_3$D=AG z(I`*H1Fm+xQj6l{joZe}PQ4@Uv>G~o5H756IY`Yb4rC+)6fqNHI77Xy z?IbqdbHcTQd37bYW4nq)?Rw{3GE~O5MC`xzMgCnjy@T>Cp{5sKDxTpeLV)K;RXt&#~rQj`n7CYhMXxV9W3om1%7+ffeyp+5?nS=gPJS$F?E zZr8*-rQ*V5N;^~S9uWn~cp+@Za9#7mBLmjsoF2+z8N+YTw+u?|A2*|kT!??mUrHx& z6YU4Smw%Pb5OFd3?BORZQrxlv>0|95M>s4hF1j%Wp4|IdWCfuHU-?bUtv}M<-l_`s z#x1riZ#1d7nOOka;diVj@kQNXC-&T6XjZZWi2^yb7Wh4D<=z*|G`)6T-9HH@#!b;3 z>cMk~^c3vmBcJrqz5J}V?U((nT%@%j@yki6{)%JA`k-#a=Az+=lkrIRJeL4b{t{nS zY+|I@zb%mMiVBp4fi;CKm-;=!t;G*#f~<;ojoXRj?N7f!%mnqQdp~Pw(xi@#Q z_RG+&DjoAGkmLO+3l&HSQYt608t*e*_E^-TKW+7y^0x7n71Z4W&r!uSEu`k+$B8v- zkv%>UhowC0MH67I6ZjsFkulIJxEl3Jsj%JJJbv^EYL_+9Vywr97ManEGDCZ4=I`E`~9lrXt$~-_Hg!l!A zIn%ph&rt@>p@_`tZ2eQk!~MLT{WR}>a%@CD=qpV)+)G@nnVHSZuV26YMML>~mDLL$ zEU2KILlk=GJ`XVo<*4rf=d&kjXEXntwrDs&wv7vhJxhSye$~fQnC#o-#Z+?0(p;Cl zF$&jbcDK77NHne5bw_{zDraBmEu5Zoz48(3oZ?|qM4F0OqM0d@)}V{>FfqA$MhIT1 zWr1xOF~F=5uoSZl05q#Gk!&R)^U@n2H;A+2-5M^QFBxDXQ>(te@^CzWTGA2fGIui` z1cg^tzQCc8k0UO3{;W@bD~Uz9&y2v(9Y|36JuJWF-H}rY>@sDmB{0W|*f-q%Ebo{{ zYU{^2zMC;yD!pfOhN}^BE6P9hx}lys2^~`&;o3B#$>2)kecX(eBZ-GKVj=z@HR_3_ z*P^IR%u_tYzKUa+Rw4=tC9Wcm*-J1bT&tXH!2Wj8zptJZrK0gm>4~shT8`%=sHh$; zUi=N)x&3ESEdTPTOz&5+HRk%;M7IF+FZWmb)IekVHq<8N8}9VfB{z%7qLyMNv8zk4 z;TzGg%QE28vq)0&NrZhsnjcz#s){dMFvu?M$3@kO=Gd?eAlIqqdMfq_u=ZM_j~X!ZHDFHFU{R6y-@x-l-?tO;@xg_rv4&*ES6JXs91;)jLgnS5nsL z_I>r^&Y34=_al~Po8b8_`$D24)3GdimGR32xIqmj-d%P%Ew2DKdat~-AfKkHH<&)m z%ZXK{|ek1#wPwHW#8LX{Nkc zd%7T#t~}-`ri9W-Y2=AU$bj3}Ot_AckSNqhqA^ zT`rWVE<1#nAB>Arkji0*@*4e|ci5x9!?5V&QKB^7V6M07F$&GJToe~AD!vn^AiDnt z0NQ%&Kgq(U;agdVF73iB01hw{%P!ZKehpQnx890+`ozCkhepK@9fJ+ z0V#DIGV+wN&$}Um!Js#wy~@#P&eJu29oYnZs;zX*e&hUXe?r(JNQHv|pgs9hfG`P| 
zk(5dI=t~EM5#@RY+@s9U*&S8al&O;qDlJdT%-?ik;$CKXi%eJs{m-nFB*BOq%K& z{?#((39~?CvR-Vvsd3NZnAvncGQK~T(dq1WNF<~9@iD|jDZE>foxjcP?hFT#rfS@e z*5i-U|0&JM0?o=t0Gy+p$byfjNhx%DoRGAXyZ~D%TLE3LkhBQ%wH3nYw+u(fxzO~ep0lT^L$LtrorlXS+?wM2d zv#Vv?&`}ULX3A7yBltT10}(J3TQ}oUe&-@!Z)bIeqvV@acSDVt_)^G?BzlDf)p60n z>D#pR=0h1cTbd!n>i#XS8Zz;3w>E{OpgIrUN$T>$SmAB{_aoX0e3d6+Uiqi?d}>l( z82bieY94RhW($A1(k>I8ya#8H5{u}GRu7deDRlX#5zo{9f$8>q{7*F$y{ir`lM!eY zKs`4`xlZ0B+b1!b&Xx`N$@a|;SdLf(1Ra|taH>V03*3}7xotTbVuLU~yeHynfe7_D zE_5WtLTe~^H5ud_Kc2|a!F^c@clIhAymZMwVLN1~cruMKT;AhPPNpkv#+J@cR9VqK z&}HiuUxb(VnmeK13uqC)IgPsSt9m!vKL+%ax6ke9NQ?n>fMVxi>7T&`If%%LnaZ(o zzddv3Ec4vyMz-;4V-;*AnC=KtqE(n;Rt+Ws$zmpxY&I%ZL(FD(eAThNEVH4iJlk{! zd8C(WY*y1U81k--kZR)f0-_^nIY)p__xJYp*I=-**7_8c!}^mI#FV~(Tea0fT#oTD zIxtG}AKafl{Zc)g3Y7$`@iZsECu1OXBLW6CBjC^WY*+B@VKz21y!e^s@|O@ZE0{5h zi*Hwu_tUwdm4L?XXc)5Bc~nNCtUmLG3r1!meE`K1wRQbsme%ScmN5554;4Pr% zhES{dCv|4}{%(Tf);=qV1*Y7PEeTi2VlSMSf&Wx zxxgQL69%ZvnK;RwI9s=#b{C7#WYWq@lprDlz?&X)Pygf_;ssdE-FVEY#!z~Gvf9$s zt?C8{%CwTs!sv7~^ZdDfJ&}M#337BeC25}EI@fHh7JL>l7&^h>@}ggspr>E1`t~mVe=P(H6s=Dcu`&gsPmgL~J*Ihsc+{PkcQZ_|a!U zyxb@=apstXIuZrbFXx$Z03O=En1vgzzue!mSpf7B?F zBJD$QG58exe9*m=e7?TwN>0gLm%%T_9_eFFI!$>g;Kc8oMLAhMb3885Pp?n`(#5re zJ{%`(kD9mit{*V>k>m#c-(ylZHE|%p#bfNwqJ+c`>cTFeYUWjb0aYprB4im@=X)OB zrjp)Ejo;tfG*+Z21b>eE`|!bNHrX1mL3%pzFbmq;AI>vJ&K#uTYJiZ?sgiZ=vfJao5OLs;ih`N+>U#?rRS|1y}7frn*#wi%+7y>8TzS+1T2W|+kgf?uXC>XaGS#E zzTST@OttswyN2nu({^64=ossplqc_@9z>VO2GJ-Raj^n&opniRXv*T$^l_+qhV0HKS2dk!K-$ zI?s70dt{Njkq-V7jJf8YD^9%52@bQhnoyltz}*tFhHSaSA!?_O#(5}y{wK}JPeT=v zT)+V=}#wc--f|4`MfwJ)9~t!#~vz9I6_yFLvALTHF+48)H5wZ{N$O#%HO z(Zz}9gO1iwqh2GAcDzH`Nw~OtTez%>BIJs4Y-C9}# z4@K8HZ0a-lpyC-}vp(;5z|%)>2{xYYdO-dG*HRyS8WWNbVu241;>>nK3_dgWRx5nVtv$ z4E=2PTEH=#0OC@X=OO0pwkB=}lEs3VTjoW&5N{6LzmaJ=-Ngdni1+tt!fGYts(VJ@ z$5v>>h>xL|=a;I9{CbvVQ_%*}-s!(li!R?To27t+g{BSBab-0f)jN^8y7kbzxPHm1 zLXeil)~AOaPN%yk2QWT}hRPTJs2>Ha0sye9hfGO~pA(98CuD!OeF0&gNrgN-~c1^yF$&(o>xR04l2m 
zW4ZoHJ<_7GrfTe9=x>srSw=sRerXEcIrW{p%fK<27zVpB7KLgBKrnH(G5-*0b5JJ%w4$Kb%hDEH)V z^QwoR#~BxPf0Y$0o%wt~E~oU?3G99R4MzcbhG-;v%@j>X>l+edtn?|vxR!+;i^B`O ze;ow(74qjAfV`M*ecJ7afLl8hhrO5N?-{NBgf9QP#J#1YhJnQ69SZV2W6>?c-Q6^e zSk|TcnH{ek^83Za;C6F!W6^Fvb<3N3G6#0`ym}*60pu~(p*r6hfhN(Xw9&``!LXUK zj5qWWT0ms|sbac6Pb3UN&tNncduI%AW$?FG=^q!9MSL!~&V7hZv(H1B_3Lz!>v z)BmPf`R#a>gJ!0Ele0}1R8F|{)PmzP1QeWXUT(FgRbf#9C1Jq|b2i}WI5dhp;%md& ziM}b&eBN%U03=MzD(o4b=>FR3D)v;`e_{HTJVz`EHf3a4ObrVhC`X|Dy2TeIn23-e zu3QM_t2hXd0zbfEOfS2+{a!;n(Imb7zeAb3-5y9!2FXfQnMUmjcJZ%6FC-bG!4^Qkc4fae2Qug9N(n+s={Eb6+v^Lg<6?33y00rK34o3O zLWasO?ss@$5!g$|(c}YIGUR;yH30XJqfg^kzOnR}=Z`jvMI2k4&b^4afX@7KT&j;> z6-UxaKMM@NYxDODY_AJ`JTV;Pp(UYrNs3RDI;9SMDgUVObSD zH=f8PrK8$2WUG?AvK_by^9aED5cjZ1HfiHe|GgzIdnztE>x$N7*0CD~>LnI|Ac2@< zgN|I2haxEqzjyVyS9$lDFyeZTy79h+fh65tC-c{$C6|gDyKjyhk{pl4rkx1Zyu~Fx zH#K2dYS#f!(my**7JzjDZ@1Io)wH(@orig`GoLu_6GX1XM;o$9yD`;A%Oo3Av?~ zO$IreyWa!C@9IR8Fa1;2??d0WC{cSQK69yR*95*&2Q9hWG+|jc@fsy)F@>FpAH1QK zglf@K&vchxFb&d7p%PE11XalBJx5**Y1IC z$W7D$}Y#pSN&tV50RWJ6WuD@&HJwP{I)Tx~<(`)tfcYJfCGPZYAcM6aN zz*v)}D6)e>+VDvHtj}+RJ5EOOdm4b0NoP(DHQB+}mn%vr$ieLWLq1=sg=}v1Jbjy_ z1k5hV1DVWJ`s2_`)$o>K)fYio z9+uRjKBe^k>Yr9$%8PyxOOgv&QYE>@HvydhxDUi}!C~CAnhXsx#m!|hIay8RtlJ$` zM}`IItV@E_456~m)(W_wKs+6OWnpD;15yn9X9%XemFTMm}RAsx`a|nrBng%wPprjBXw+Jp=fMl^P zD|sJ@vh5R(b0>JRJY^5w)=d{tuNss91ZXegcMqqD^0(9Lu~|f<5G(8KS%vHMnqCQ} zGc_+8yCZat-qYOaFD+Yue2zs-b7ioi0oP~Rt!5Ha@!?~|*m2_7Iq}kaDW-1fnd4?2 zayhxf5GZHumUJ3#@{yNiF`c{39z6qMHIvVqzfplJIZjwZMo`*7^c)sG#($Q}PMb96 z>K}!NhC<%d804Umk~sAyw{=%@qEOh?74ZYm;H~+w%OLK1O1?uCy9>9 zd0O1t3miD@^7norC+^0dD47W?CeAxVG#3v{os5K7*Pl_-2JAu~yLRkp-oR}~&yhrn z4nWp*0rHg|Xq4VTD|FIs&ZF9Tp2j<`(xVoWU)(e2bz~8zEF!Un_R z?S!h%Oba*b3B$29GyepF!x>updn^BBd{1uV^*6swT(JyFfDZ-n!jT-a$hUeHw=@FR zv*{`LX02f-9c|aSr}na^T`*G&?pAv=z2H!gc&Tj#Jf`}HI6CC}Lg`M@C8!~=Q&@W% z+_*iHu@~!;Nc=Dspz&P-RL0_w51!`QswRUB7hUwJoC%lq^)`Nj_tdk)SJ)b#db-`0Xlo#vq zICEinwH;V3Cu{h2)GfY3KOe^aNPG5ef<#SzT$_K@!IV=2f6rF+)5Z;qD%P4d}Uk$PJQPn+)K@^J;Z`)5k7z>!x|j 
z@3VXbt*Z1uGOO7&_T{M8r#oJ26T5b)p;X`c2#)+PzrEbQsWeF)OcB=vZUZff)SXdO zh;JpRvFvq?zeBCyF(@E_^4&_IBkKanQ{I*xn5&r>5+7wkN(#H^6_mvfW;Z!lw=vZ5+#h7I_aN&_)yk?_O14j9%3GjEKuQUY>_?n`!l(x&zF3zYv`(@f3clb zWwPpF9(P{!bLw-~=T-7&a!-TCOlwRG8p{K2R=sd~0d9watkQWK&}?E`o>63TWj$%14kqk$R|K8lz&UiE-xE+W#LWGd3nHAPwLK5@ ztQ3K^=cLo3bT|~Qj+NS5@C1{TjCdud7Eaz96^1`sxRi-xyOFa+S7FYFmrCw88H)HccBEdqf3&B3pr}w~M!Zyu8)b z9U9WkwsB)X)z2A?rWkVN>n=C>De`Dy` zSJ@>eP*K@CPjEvZB2HhDCV>*#ZlG2y>{uKS{|>+4yVPKa@;9l~#F7l>`vD)?cBpmN zLrg?Ud4NmY(oIwyl~Bd{^xpCD>>B@exI{8|^tuuL#=flhd8v0I!`y+fX2eZR$%CRw zkgrU0LQCq*TpS;pSTp|w^?~%T;{n3Wa{s-#S^6Se?MgUl1aPW%#S*8Sy@7of;M7R` zRH#>h)o%6}S0<)U$Hu+)BP0{ti62so z*6ln?Uc-BN ztL}+|gvUG$=#nC&;5vZq&UOo$KVXPZv?--@r=Rq`EJjOl_k&Psr8m_g?FO;OA$_bf zDwSKu-f@VVW?($hC7Ggtf6JNqYodlGtjk~1Wv9(Mc417|L4_mY+*VfLksJWr0qEw$O(@iN5CQglcfDBe^Lrr=u1jDbE z(I{}>#tu->6X=(wwAgqWrSCoKVTn&s6uGY!5dWnZn*E!YhfKjs`OjP#k8oQ3t8&?P zYoA^{TNSa#RPTjDhYBX(`Xd1mQv4N4DQ_U8mvPJ?OT-N_znt>jo+!WI)JVP|weEbf z6QZoB;hTO)_(>h!BjABqSeU&|xaQ{)lrU4+JUxV$FW)vz$T<#B)!g*)#hn$9`rMa+@1smK|-Bg(>ImLY+^dtO3?)nvVWYN1iPrw%!W_K>8 z)ZYMndMB_6(U8GN??*tdFa*@xjA2Hb=2dD=)$3Q5z#5WJI`itOA~lPT8Nw7rsONeC zR2yFpWMP;9!GS#;r z{gNm^w?!`sgmOz5s2Kx;-s<0yDwl5M(^KlS`MNMLHOfM2O1tc@#<7j9EDf^`2mDTZ zHw$EZk8{A+89F9)HPh;%RHbuGBLUkLF86{n@3FJ#NXYl(a^4+YV9N|?GI7R3Vf)C; zq}oNJOE2PHKNj-{U0pf_mi?e()41wv3vK(=4%iq_t^-R4qDbU2yS5vPyWyjc=Ju~8 zRm+SDhQ-rf7c6Ob7OMNP@!2hGia=&2tt{~E*}I=b_+DUOQtpc}&TD-S@fh$?5gFv+ zNnYo@bt_p5f&{U-Q$EU5FATjrR2?D{$rLE$7|rjHAF_=(YQtaz24sP~8%j#&o9fXesVHceUc*Vr>em4N`Zf&2fKu)} z`wBATqXq-fczW411}{N+WjLz7F#Xqcz`I=mGFDI1;XwsoQy4{dWZsfJ8 zhKMijz&JV?l~)G09TE$L7@+_gJ50FKg+dQmw*Pf0Hx1ZuO!ebxobW1&8C8A?2a{l~ z&G}c`w~z89fm;DNJ0^KkpKM>vpL4mKvpLRjp)`9Dv%}ul5RnO(?#T(Svj90^+g{DK z*cA0V|A)kH6X~f{t%!n_PX|_fJ5cP4FJ5_*aq^#)pdhy+;MzlWj=)A&r2bFgS-GWZ zOn43aw3NswcNl{)di4}S%>zhqI7qpS9m~q_C#yAb;@IjMa-;Vu{;a}fQa_z0NPGj# zGi+)(gb~7b>P!}H0Fgbv7Q&We`v)k}+Gp?+neH}YieEfB_pc=?U$a*Sev?~>9S}3% z_*$%DAfaJ}x4$o^R8GA=a3B$e!ZI{H$M!O4IaQW<1cA!s0VRULoI&=fyrtQLDu!+i 
z-?F?4hBce^s&`Di>$^o}7|}}|R7T6$)7FC|p`~D96|qpbn!IQ|XaT9GhPbRR*dGh~ zxUsO=zsgVCa~EG~PP-y$(9!wON5$`Jt2-ELG4%ssTi9HP#4v1Y0ckprZFPQL_kt&T zuL?}!VAOBIDe_WP_bU9;ZZ2xv4~@*q7@|*OD4@Hqd3g);F;lE}nJdm*ZRq)P9YBKN zO>jY?jIF1?UIr@mn^1VGf_wn_@csz(X3h%;rjGZ~Z&5;=<|g5RGOxTJwCW0>1zS#G z?^Br5=>7mG|0+&KZLWA{*-3M_+002V#heM(wTw29?G4g{6J8Teotoq$y6fn%n@?M1 zk(^x?+;3rL@3i;$7^}b$nqLP*aP|?_i*br$P`2Bf|sbXwDEC zh%?!EoB8F#gAp{0HShug?OEfZ5n#v`duI=lRFw?JGr2B3Wk^ogR@r=4%tPn`XYjGz zQb*!?T^#Jwm0B8$52d^8uRnhZq2={{eDpUB&UZauzPHh(y*bv{_?(@9Isr8+SVDCE zGn)n+x~bKnp*450koTIZwlAqx{}wDloaer;LL$+g!?lb zrPP^x>6EMQHz@#Tw@r)zh;?xWI*-^bIrhD-;%?r4E~1+izn^x2FYn(UaQf;uwl~4l zw5EmM{9kt3ur55%d7$D7n+o)<&TeA!y(_)$e``^jdB0Kb99aTJdCS_-e}2E&iZPol zuj`w$T`emA{pKR9@fWg>BEYVP)kjXA(QPY8@K(3cQOsd|T229WDr1*HE!l!xkDcim z&2%3vT!89a(J3Q7>ZiZJcj)b-J}=N}+e8Z@#I;l)ambWZJ;FrP@9%r!fD$1OmT5u&oV1u zcwdilS1`b?t6n{grO-OyGsWTj=olM}jQ{|x?)RrsUwKOsQ!}>RXj%M_s;8j8a=!hR_P_TBw=;E@qGG66;PW+;s^DJbyrBsGORF0i>Ie=e0vgZnP8>7{hP) z^QfRk*A1Td<%SdL!5@xC3UUjL)b+D-fp3IAcYz9u0r0r*^(2(n}Y2|%v+4@D?`>PGS&lFi{*tFA@rHcs8j)w6luc;nyeeD}{CfPqS6 zdegDa!{Gok1_17pwIf*kh7skbSL%=J7+#TywMFJtvF;yEXw|^b6a^__a2Qo=z%H=~ zB49SSEz2@sw%j`9;LMd`2yAWT32exFIzQjl6=SwwT|u;6)$2Aqka$5lipHANXaPxU zvCE8Q6~$G=&=PGWTIIi5x+&)6_D{fxmbj%?+VUesDksrN7diVxGdNdYEeyR@60h(s z0bEmIq>9h5@U9Ef#?vGbsbBA3#9#MJPm#p=`BgtpWF_uC^<%_kr}joCw)2XpOMxrX zcTO8H5YwiBQb0KdJ^_?Z-hQ)lR;TeU*YAHSC%J>XRCPDko#+}#L7zIG1~Krc&1Z>O)IjaF9rJX!;9l1z=|f z<81}x+3S2NF{hYrde^TV`e)Zz86;C3+4l>-V{aN3Ykqe+gshqX12dbhng=f_$zQQ4 z(#!=89bkPL^btvdxwLf^paPh#i;{25|uP0<3pUl_t_~xVfd&_zFAHIlR+NJr-!eATf~r;wqjI_*PS7*5#3J>)`ts7A_>9# zJ%OH`U}lv84(&kMc;Qp|!HlF+$E#fI;2|q~4c=4xS@t`r&M%o1UuebUy9fI6=YvDQ zqBl_6ADfP+Ki&`c&PA(lOBNyN@MjX|GhbEu_UxJvOClVMg#1oj0IGi)6DV3z1ZdQ$ zX5@UAoX1cfSvo2cpYEk=2tUJNs(x<^I_x)LT3nL|FrK_-%Cjju?~ypem8p+74r+(u zYvRb#yv9kubc^M?=PR2G!U;T6=JIZ%a{`r|+vVC%XZnE<4|?>P6|PK(OYkEAswY5@ zLcQ2wzfM=+(UOgXlAevp%%4>M&U2#o1Ja4bDyL3W$DISC!K56wr-62a|)&fOV+o+ zm>haNwb4kD6s*hFpv+gh2M+Sv)I&i{fC|fnp~)A)@|GWFLmsJ{+mWy;P(bZL4u9Z 
zD0kIeLDX_<$49xY>AtXG-!jPIHCW@H!1be)>RCS&Lcq7o#q6hCfcmHN2k~f7MgXu} z0z34D6OQRzEGeTqB43+qUta+Yr$|HrnVV^1{UUZ4;I%z4OE9d*U8`7?;Q7G(qJb&r$1XQ;V z2dHs0skg`_n{HfGu!_v|qvDyN5H14@&oCFm3&bClNdmwbuy+}^YtAEoV4VnN{($qj zLI`o)r-UM{epqNk>1rs+hPT^Ibqct9A_hWC??e)6CbH23noW<7_WGDt(o?HV_rKyI z$GZxuLvr5G=5J%)2{~>g%AelDOIrvz?pin_4+e64)B)^Wk;vDIMS`%HLyO2mNx{BJ%l{nPZo9pyR@&htcPB`&4zc2 zFK@NYA^EOQ&s+K?9xp%{h(-wwtgg z?4zB#X-FPkx;`6>YtoLFKn}R0w_V~+>?6FJ2j(nJ>SfX6J>|bE9r2E>7Ob429}usx z%b{jo1YJ?TWbNy4L$uXB{Ow`n```Hv?RNc(xrm$+poR59#4z<2UC8lq@Io*f^VQ1! zZl!pVNcy3G(tQqD^u_l0%g}+{>#gF45kD^>NUuwz=U-vnPiC3Zm!PCsSzx@ zU3+t_elR=quzuBv{0f2Ap#_VV^ZyS1z`x{0oj!}m0bvYt0hxvpv>v@uCP~{KT9Zek zfE+cL9S@9Lv(VNy?g{*^J1MKUdB~R-EBw_0|8&ahTqx>oDB}P}t;FE3fcyBhZ!lV` zjQ3oAU9_$gRaNwIto2Q^fkp^(uPEnA8I-~I5rY{uFvIx-Py{Ys7OdO0OL@)nL5=`f z%uehUdwdC$Ghjpvh^qo7bgcN^Oku|&3ME_zH)ECD?GuSf-{a7L7`)anQ=cod< z1thBuC_OveJs&t*q)Kh$;dnKrV>T1NpvUn_w}&h|r0L~1*_Q&0f%OCV{uKoDiZ!C1 z7r$_I@Dt?)hfg|?Vg(Lxv5-}2H$BO7Le_)4O%PGt1B_%qmVt*<19VL~d$<WH=MI zLyO9CGeeH$6f=QlgN`Jr01kjQY0(GJhC9fGSK*mlM54i1VfMEUVl~lTu zU^7>+-wgS_J1t_^%i0Od1b!kq6|oORf7lbSEzYcL?4@|*OG(|scg zL_vy0C|^4xQ}=yWl7BW7B>E2teL&?tTjvGIrp9wh6>s2`?^vi-m>z=rlN=T0!xN!) z1^A|}C=TS`h3m84PwWOwFimEaMCkluo6;;}2P+ytelk zbcL5!EBWWWp@_K-_Rt9!ZDP}enwL^f(DquM*!I|58rV(lG<1tpJ0<86duMOv6JF? 
zhhwFr@`&L&!zOcU+-6*fg@k6%1U~!SbRVc+m1m}MTV?Zuo&NY+pviC1Dy7Y+B3+8Nzl2{H892`R}ErT);s?-S<_o zUQVFUHy`2CtFpnYsMrB&>!Z1}Rik!^&tl-ZZ@CoKTIVb zBu$v0L-_b4Qqj%rAc`-R=qTtr%1to zmQOhDM?NAqT=_L^b>E1^Qh5uw_|70}iW7rtQf_jT0<#)LJ-R^2DB|3aaGgG%mtSn` zXr0Fh$#Usu9^>0Y7Y`j`C1Ua8F65a@X%!ZcD%jpQV3&}UH}(?*j<|FGUX_2jf^%GM zHl7Z-G`ueo$G2dM;98o-IFH{df$>t{#c&R=szgqRB)WWP-p?Pe6KFONGS4P63MtDh z3(n%!au(HhUXMIqqJHnj+81{tPx3fcTs#A4O)h*cyD0ahH);S{I+iw`S-ojT8ZzwUP8%h82;X%!Y(5wXh8$phV zdAf`Y9#%L*?~rrxH=;cH${*?e*6s+Ud*)RGX&4NoyMRvE{fYI(4@XnC{C>n!*vRGqm z#%El?n19~hHh*VI9!vIXt#C@N;6$&>Qm>{8$Xu(@5=}Ft*KwmY3oEa$Eglpv0%H?Q zC3$U-mvZLfc$M5YcM5tcYA9yOu1h?Hl)Ej?#q?j$KGmp^E}*bQaqym% z%?XKKKno^I5y%+!HSecX9SXV80lecvS@K4|-1xZF8GI(|&BO{AEMBVCM`KxTH?#5S zwb{h*FdDM*M6Ub3L9V(~gS>}_##dLz3@ zPOaNC0DC~H(HFJj7XR0{O08`R+T=ITVSq-{SR$#9uM^zw~x0dbrf|JQfN7nGH5~B zrcLQ*I@m32s#kufHzl(Do@3CWf$V;##dlY~kc*#?8LB+aV5u{%*W3+_U?g?d$3-j1#^e}GvnIj-Of{B7x2WlC%-$M*a2hrqvk`ACo^ZN z%Oe*&>H{kd?o2byEmH0WUg5|U|D?gm?omsR!hvWiOml!&9q9bYO%JJkks28eraz9e z-%~^?kULC~@ulVjlL&O(S*sx8X#4wI=ASQO&bO^Y@Fw@C=YD45Jfbl|iv>+$ZO894 zd%^VGBn3Eka$S^MY^^L6W%FIPGMS!O)@? 
z`cAvXm&Xk6EOWmQEX(}cJn7$0|HyNVKgINc-vpKF86#9YG=fE=m)?+{7W`Qemeagh z(!$m8n|!uTz{&KS%Og-DEnMj`t=OkO?%BhnoCBAmOdyPRk!yohWi48E!4Tgn%0?)K zfF9=}_*n@X6{ddtn+j)w`xmm9Y@P?nirtap%-w<0M~C9cA-#>kUNq__wU*X?+5ueG z`eNi{)~(QsL7@ArJ>XeHipGk-;ou;3B)>=!&jIG`g1U^@VX^v?dOU&CK=!OD)6hOOI^dH(yM zPkOZ6VJ8e9!nJfJVhKf8wN)o?i`4kCd?QJ%wNKepVrP-fj^fwWz>X_3JM-pM{wcJP znC19&>|oe$=)}>H#}?K(K)DhQiDW0fXjUB5uY~bqwWiG?AE@GgYyXB|nm~2Frj@gt z`Ey`960_n+Tu(eGmaGB;&&`R;nHbY<`WbXKekB-1&^~3S`|1?8-FB3AfI(C4#m5og z27^#zO4O)D;{6`Ile!Z(g(Rj{y_1t8 zQC8ZjJ#M2H-S9N)lVa24%Ck)OumL4O;rfJWW_W(K<(_*dzZ&3o|G_8c&XHVEKVWbd zrJoZ0x_p(zgt9;@VE3CB=MH#Mdm!6&b-$I0a0UVTCuaGb*F(;fVwpn0*xCv`#Y*Xj z*rD5d_kwF!&2*6);~C20qA4>bE7&`W8CiagWo(melWx{E(y?B)QG!gVl~%!TUEkElFDKrs_NAu>P@ipnp6F{WWU7&q`&&>~Q_2Fx zxfQ(jn@|<2_0z8_dVcggvRD2eVah*!^16I$b@Jn!@z8fR6HM)^&k|(I&a#ZGlGlc* zkrbn>e#2|F3nw4TDeYs9K2^6S#pBdHF}t83x-~Z5l(ArcvnbeI|)Yk$n`V8nEA#HL%OAQ&msktq*BT)BhVqeXpzPF*j1PgO}b++0b z6h@R9fqZGze%o`ZSW!yB%xloS?O}FczF;5KsY3G8Y78~=RoU$26rPIh#Sgl1*J_R6 z$LAjbnuW`$&$hyPB4N30>vd)@0HRPvCp;9l`#Sd;8|ZZH6->3=EpLfc6HHDu-&fSx zP%&K&aGCd4#?6E-cgMYCiJuDXt(P70wSm;g0Kgs00Jo6~-h7j3T?zA<1Oa_FSD!q@3hZyUtzq8>K4j9usdHywuc6 zV{LF7ojhk0jRd-6aNwn^xLw4LebTi`E)9Lmi+RilkeXQd|M|>H^TnA6*3XsVstb`Z zDSYTRWQk{iEvO8p4cKfA{^}peVKw8{Bd5HlE?35G^OrVLE`o;!0lP}@_;$w5#~A1iZN6zYTv z*iYGsCQb1{&>F@h?%dGd`!_A;U!-zdrm6aP@n33Dk*1&P3p}3(2uGW za8FV+xT?F0Uw7)MK6XkMt4`or^^~Hey}r4$bmL{qFBv2QDg^TLI5V1C&^LY5!JzZX z&sIQnkhZbk;g8)=2#-gTrx3}Qfd7^_v-^gv_T+70|Rd1tNh#k$k(%wrm~oM`};VgjQ*RA;p?KS8D-dH zuglV|+>fKrrO;=9yznjNg@2t1CQ29x1aaBW;ztd0Ba;PWp(wO!%rIA8~0nj z+K)w64up4kn?qYT>`iq-@OR3cGCx+N^?M7=@OpyJ_TqSRkeykd1Az1q2x+3ef*aX} z;?XR+Xgjcr?$-1LQZYF>**=Ee3ZxfFigCun_<5^l00G1xkR1rdwFVm5`JEsc+tHZi zzn^bu4#$Rn-Sod7!y{=Q5RibfLR!-4*K@77bLQD&opJY?;Jnl)#d4JI@i~{iWfZZ_jbHXlhL8h(+a5gBNZ>RDMc;8~BGR;!6kV!%zhRa=sQ%Ki z5P1H2jx(+BhKINEhNF3TToh-UUb>}FMVWQ0D$uJYP)}n!h=#3p5dm-n#7@{APwpx1 z64}Syki7rr@_1Y34iKf?CxcIbBjFViJJ7nKFU*+Z`ZO%(&Cv*4RVe2Bs8JZv~K z;mXFxBSIRThkiEDv`2u`?HLWF1T`Y6$pAWtEAQ5$kPluJj&0}2U_5qp?-!ByB|mp; 
zjzFxknu+MXZmx!@FBdSK3Wi|tk>Z1>P~D6U$DvGu-fY(wfV^%|_;ZZNUV2HK90 z+KV{K6T&s&?Sx2!oDt9u4ceM(opxLv|2IZ=_@5RU0%Pdp?-gL900smZS@|948XGA* zup5i`SM!6leX`5eSf=S$Z3i+d0SLvMmBx(Hqa7d`)zET~3y4Nq*0yw{c~)QGbj zfMg%+4MQ4L`f%x>v~Rai?MIh;^Qdi=@vk}Jw;DUuub|uPJlh1ozd1N%ym1@pyybEI za-`>F=`t)H8CURU*SnZZ&}0kEScSm7HB$P(d4EcH5y$%ZcSSj2C#KnvJ_kyg znc+^O=f&es#qG*c$KLHbviGLcFDE6Wat|KNWi&fpu*DwQUsI* z-{+?_PPO^i0!`R6W8{oyJiB{zWE9+OQvtZ&P&EAlCV3eaJG0EGNnb}~rIHe=UUgk- z?%&RKYt%fD2w;(0`2%~ z1}jpZtWA1q8DX&Wf4`{FIwX|v(5(1|v?)Xh%{=L5zaP-Jk_cp)^PZ*`gk~f*YYgjD z8*`dyUAFHN>)W%09N{rD4?|OqRqlDlAMp(#)2^RBtDX0YL=&8$3RC>$1g%)v4?J=2 zynt_0azbDnh%6xP#2xZ_1o$DXV|gqXuu_ehTWMiefW2(}!yVz89Oar7jF%gMCpI*n zCj>Tc9UFp)=z{(CK;S%{BT+lAnwFO5*A)9|CCNm~|MEG?iuR}eaGUc_Xo>v1EZ(p} zIOF%7a!bP}R1XjzU$bd2{?K7abg?qE$t>@?#!1M*bPr~~*gg<@u4G+jBPpNbUUxz| z)&e->Ilr@Q8zI#J(^z(Pf@$VacZQprn1BB`y7TNr@R15reMnleaVVEA}9ty-@6MUlZq3Ix%V zZtp>n{9^Y9?MEhCB6LxQ5?;AZhRr~nn3b3Bk4YRqbl@WJ6tgSwXvh==s<~G8{g-Or zWo>FO|7G!uHbm^nWQ`Y%>IF%x+Q%+KToHnSjM4)_GZz?gdWRIVh7RiK?K2nJFwmjY z0F4fi^M{WRIC+dR`-BHpuj12l+*?Pg7KT6}1)eXUUiQr^Aq?gLZrq0p|T(7j6Pwo-fN zTb510@M~uFdGe>37hYcv7MED3pWRpFNWU;^mD_UrGA8S`BY<`BuI?Hd<1hVRl;sq@ z*#5)<}}15pU( z6vje@DMa$|)$hN^3Flv~eai4bg3fZLc@;~)nmq?mpjB9m7fqKNr+1nfn?A!gcApu; zT-z%0lREFl{n=eB`?aZw`QeeT%{cC+=A){XqKaeb6C2j@sXJvVX zQWJN!UF+@5hjy!E&sdWZ&a64`w}0ioyaad%Wf>ZHb&HWkC2YQCO40yzpBC~H-7Qmu zWblpj`k~XOVx|mtpR?&_q(42k=D@G7fw?(3XPK?HE~v8YV$SU@@GR5-H9#@vYLiS5 zV~5H+rk$?uurW;kAf{N|+PPtQ_Ah0?9(5TcAj{n^3Qy6a?EEG*p0;s6fc&sI$_F_6 zixqoI)`l7VA+*Vn^3ra}T|97u`^!(LQ;5Kb+a`hU$pK}zXeb{_>}5~CaUX>Pl(SN; zyEo>Er!4TV+s;jyT=~P5S{nD>@ul1^Yng8!q&XXQ za$#blU9!cGv&~d^wB$(F=spUTQL-+-lsS!y4-S?b$m$!5C=my)q?{;sGJI zStuqPu%hlNE_pHR_;UGATd+iXFTSM6Z+7?Y%;wV3_V6}ubI6fRhW)=cgEFD)*0I-_ zgSmEbD|=hTbp%lVJ(tCtp?MCH6mymq{_WxTZzXnE=~5M0>Q#6hAS7P8f#*R#Hk6&= z-r{aq7$;7&vwK!@5&4X4OpS^nDg9RL+eQ|8ez*-a#%o_wwrSA--;w2Vf79mv#St6s zSrW4e0&@^I06PfFi6rM_E!3Wskc zKHu&u3*7kFe5mh>H4IW-CxPH-V0C6omGnQ&Ypa{}H`{Ezr(-c0s@Ylo+|6)y+-oyN 
z1HHZnO2@?AaikMUz}&22q_n%_ZbgURLfLO%Z)G7sG*!Pv|GZ8U%k#{w-~@y2K{%3W zOxhz9VzZ3x2EpD~gbq#VY2)UJ%Q_pO022~3DJj+(MSiO64u`LEAcEfx=3GJ^I%sES znkOvLksT=v$LI=NW}qTe2EfpN`0KEa$A zFpAiO-!W(CCTV5ZaKuclD*C)KB28TS1o2g=-OU5jVbIdkBcQ-vcs(1|S3oN$bYmThT*Jtw?aVe6`bWJgq29Hu*spi8e+Nf-@^HRt>Ok2* zGB1e}oJYe&-M);k^>1_HNq*#DbZ@YK4wiEXEi_yvNUcC|0;bD#LXL~DJxxFUzT}LM zo?shaa(qWD%Y1Ac@2L&C%~wFUTB7|k+icu&vP9zP57Q}7V+>nPp+MAm5fnx=s4`$W z6zqTQh4A}o>X&exo^%(oSi(mxYDJEvg=1;U{&l(i*EvXct5hJ0VQ*!;0Hzs266*Ro zG??IF*5W}vbOI`}WFhwV{x$X@ZY~_s!<*MxpY)>cIc}Z%Df%u6GKjg3+E-GZe>~&a zijJ$_-atjebcI%oc>fZGtbfSVhbGrF{8(qqY>n=+UxL?%+kIY?^Dy-C9N6H_n76u^ z%P-NN4}6rcX%l^2I4=w-qt@82SB|pj8Clz-58$zF#wB8PqsLz)v|eU0HlYk)?fHGoh+ZESMQ ztcoL69b!;i`C#GRpf}w(r9>}csmRanbT_!K34+#Y1v)x_^DJ)R3U4)5-wVGGbq_sR z?7EK3_^POKL@m;!Aw1i;i(k9sAL8l%L~1dQQkS?T^?P>H?qxo^ST_Gy$l8dlQG5-C zoYKK@HNu3c;>XzQ{rpkDX~lsSvh(tP{b~PY!adJDB)b-frU%?-9jHF%77{idR{z^$@ZX)7m>f5=CV-R>t`tFI)=*C$ z%mSF*B5{45=lpQO(d5o+jlxP2kd)zu$9Zw;@^a|}$>_4dW$VgU)oiPoM&H`EKNZU@ zudNI{>zTvC&)Ht3f?kh|{yR)fNa@R8^ILz+&R=P1{Mp)&Z%9rHkDE*08hc&}F7fR@|xLukz|6#8PEv5HC$s3#?Z z6~*jg+j-7=SYw-a+h<|8p52bS(4yf-wpg}RW_`rb)20t*DE!!&{m6bj{wyUXaApF2 zC;r_#4C^{IfviS+Su@Q_G}>NUeR6WLu{HZFkWBFHbt}14Y9x`*==U_S3FGzj2UJ zgWU>Zhn-|HCzIY2#dPpkzYt2$1j4V?n@m9~CwvLmqI%!mCW8!Of|(W`J^nZ$=*y#5EEJv0j<_^em(@s)#I#t5&I zBJ4gO^V)o7?tKr|Fg?}ujivor#@1iX`8zzb%J0~I|8_U11-d0~(gQ8;%jf%sVS>eI z!lQlg{kjMRG8>ev`-bsq;|X51RDRYBgCEBd>9qJWWX43$JSWfZQ#-{XkLtkT&~?+r z(SMlqy1W*!t%^l_&Hv26lc4|=l9;;RcL4;B+`rp6F{t9CP`1kVfj)kZDd^~c4AuY^ z8aYqs^<@KZ;_e9dP}czBE+FO5&F;n$)*Q~=E)ynmbFzxoZSPUH6F1k}HO;H_;>ECT z8CcuIKYpRYqz~#Dzi<(pZ6t2->(MK&8pp7@$&zrQl53`me+X%GJ3PXR%ZSE+ZEB;q z5-jq|Bt!o)Wk)fJR5+?^u4`DR)du5;T?76hpLW9)F2q{;mRy7bR#DFc)b7HRFDL2I5vfH z1o_Vs%5_zhV9>)90eTp>DB-mjfuDvatou}kA&kup*u@-=W@Garckf9Zs7qDgeZWWv z##t%Sw)e&5rv7l7s86(=_(X+*Q#>h; z^K-3D+Sd_HMq8~m5K}YbYOJ}nfC{4Y5?<-=aAe&BDJD>%T7@}))gJdIJA2w@(_!@; z|8IZY+*8fcQ;OVEQ~nt4`I8}i(s(*VECTT;i#0Nz6K1#!WrGFfJJP3OXZ4uH52C_@^u8&#;#g416x{-| 
zo7}=9nQYs!GwQ^QZ$Jd{+o^%Cz=s%|II5m-N?m-lNPk$geW$)K(l#tj_-EZK>sI=W zf*IXK(`;~i@LU!!6RziJ=LkTm*Zi=>&Yad=)I>rz?j-m)X?3T!sa{i%ionDD5cXW9 z2;lm*3SniTbjxp_%^F?prx1W8v}G`Se&d(8#`)<+G8P@ZTr{QmTt;Ctx^MY83niP< z?Q;5y_KDPPeDx`ke-Eae!XUlyhM@S~KX5~69(E-HJFcuO%nt9xhaxK5_~>XJscD8= zPu8k`F+9gPj%m{~G4>21PzbnMuoFM>KdRNU>Oc1;L2e`3WP|7M5TH@uh(h=v=iIfgom~#Q zFVeqmdh5-++by3@yV5m>#Zu`TbcZ=vt_3s$AvCF*r9 z5v|tIUXZHAcwFX3;>_1XPLI#KJ7gm~2=*A1f7c4sV+H=M@#s(`aN5jS{=32Kn_mXDu!1 zNRBKO=e}%57W2>;#ln9v4!48%O)1i@K#*GzZxzU7wrvl%8jwYqvS4bT{qT+i zg~J`st4$$eD;S&EteZTYuiciOEaZfrCa=4w;whRr70eI~_JS0^Ax9{vp%}&oS1!4w zOBJ{YzlR$={mVQrM?zELI{e*igkqm*D-+kV9W+>-XmC-6*7piQ{|!^g%x_2LnSt{f zaAT3(E_OlKIa=$m2MoVfJQd$XIoD zL`E3K_AT0CSagYznS^uWrX@FwKTQ|^ zxTZ`bv1}m_7(ecn3zD9Rj$m^n`?o8MP&)QI=J4id+m@DyppCKTf%i&UoIyKKpPfJR zW+FR%_VgaJ8XH|g72A-1ttB2HqDG;J141ipfjj+xvW zb78n$Yvnu1uLq)&q#B=-JPpH+)d&{sDk=%eapuI-AjG?FwE!)&<(9=FyGM7{xY7@} z8r<~u_wOGpy!^}{atfPw??+;nO=Kig>2Jo5eaXx9d_<6&|IEyF7Yrw!!b@{i+x|NL zq|n!OIvQ0sDMsmupFq7S9qn*NJ^IFRN@7o4_A23pB3*(aiCfsu$EJ%EvRv>wXjvp5 z9XvShKG(B+xHvd-k`|{khM7Y04P4uDS;Q>|mn`}7^R(7K3)WE<2)$cV?qw3hZz(sG zM)8Y$Fbbeb|1?*xA9Km9rHMk zqE!Dl{q$oe@Gb%_j)656`-Gd1?Y105QeRC9<`l^I6}h7wGu}NK%pzW++DY7`bR>b^ zfys02f3gQFjjHCzMby<*m}Q3y77ylf=Mx15EJq{D2;v8Y0CEKyPtbhAQxja{*!r3nGF84ZVa5D`>-d&gss1f;p4+V z`!6n14Ur2Ew1^B7!1v=Q*IDlH3e&ptDI84WGJfLX$^P1TiW17zW66>u_XoX>3cvnU z`ce^~#M|vLX)NvPQP#%(tvg7i+Gr^nd?sKq+cofpdxS!RowZvU(tWgsq3&7u81YB( zx!Jb672ptREVO)W@UbmZFso?%F zlF?^nCM6k*m=5l8Ohd$b1{zo_o0!p0v*p+q z=u_1pG$PH(aJyD*%tQXfQPy>-(}FtOm#iY^*bm4N9DoBX0)_xNe3hhS#RCw8}XKpH3oFCU6DwxRh8sb}&vsZ+E;t zlUI&dL#Nx(VQn&u-db(=f+bJ1A4 zHTHzjw@X=nk;vAS#+T{SmS<3^IaUXZjmcbD=JYJGUOWyAYny6V zRk#;O7BAt)`pP>^((0#?H8-=Fi5}i;Hi~i2sEuP|9>bB>jI5EuDH+eix7`Gf=7c8O zP(j$HG0}a`NrP-XS7c|y3}%WUz$oi>qTyPvq^jxSt-=KrNyLPDKL$KJ>2vv6jsUZ$ zUwX2C!K-V754eQmYl$ZLylx}S{pZ)*S_1*W)k~e2`ntb=vx#-brEDX*ZAp(##zy|? 
zM!__0T)$gf?4?=yg0b7XG(5o;rTf)m@^g%lC*lmZ-RI)z!h5ZMh6drp$m|SN;9_%@Vs~w-2B_`)fN6a7e5L<0VVB`Udc&guY=ndYUe(2g|6eD)wEn zN>~dC&q^;fW{>!qvziaw>ed%O!3p})wlNj>m85UpW>VZm!{b~DyuIe-4s$m8*^0O9 zzv>|{bfh@#qwZZ2XwW5l>2=8L&Cu?3Q69ypOvX2aD{PnH z#@a|MR<06IHfOJ(A7=Bsr?2#5Ji;ni19ey>jpWkR{lxc&&csm>CvN2m zy4JyoAghk{S7^)u=}*iA3i~S5(GuO z2MgSW2Xow`qE2xMdI4t^rjZnko|pj1pX70N@Kv=Jn_IpXMkfk9Q?61o z>Qmd!V3e}EplFOsi?Q%J*<|k_uq_x64|Q=(Glu=8Q4 z!I;2rxgNHr6q6%IEcdj20T1V*+SC5WfT+Tp@kCyafB*6Rla_R6&TZ)+rNy^5!8Mil z`SVOSQc$FFhd<6DdR>@|T9s!1;!!`ZOkgcG60P^0>!{N%p}QC@_&QgL>(@#drD2s0 zOkzd)84CzyEgBluC#iB;Ypd`{L=Kt>bYme}`EH%$*0) zmlIk#edq_W5PEU}2~4}iYH6}0S&4Wn|8~pKu#~l%Q`#(DyLDY#vueVad$^*whB-ao z?{ILN*`eZVCj5~Ib$?$+y1pTP0mDAw-g;v1UQV}?PEF)!8z!KwvaOriTNOzy!pwX% zAWL?99CvTRd0oV}Fbvd**c#NEEwE;N|JRxsQH!c}D(ZaNDgWaExyzk`r@fj^|q^|K3!g6VO;;PJr}3Rh?N-3rD@ zru|+)+g@SI!C}Pm=Mz&YbNHI26-CJfEBn9KGe7t&TzJcysH&l1+A8O0!C$zcV8w82 z9hGrIxQ6SXxLcyV{709LX+Qgg=$mWwfxO^GPj+rf-Y@DTb`LrKvCF;fY_sNdH%_sh z$AY$=&BekG-0Le;yE+p>?ZRsep!_Kz_&{A;=73m1MZaE;nI(@OF9c|5{!VC$G~*Po)l?_n2(j-pdeFXpz4> z+fiL>INGYA|0+3Eu$aiw&)L7y32q-+ygm&7ONuUD#gD}e+q^2%+r2wwxM{N*6~+0) z&JXTe?b@d9I^Kza-@L~HuBnt0abm1C5@Or|TmR=%E!`Y}hCf>TCy11kSO$*ekp?>) z3}xw4@0pkDWE1tf<}Oku#2VcR^TrbyIWgOJY)*6rm)UiU)e<+lLKOsa66hp{9dbRsnny!N<+v5`d1h z-HalKg~>%QG4;U*w@z4d6S!mh8LOF>1l{Fxj{T$U6fJ{jh^N0SnaNRgTbMRYRBe+9 zd&(B+KTBm8Nck>5+Ds8}DyAynR5&Ze+?Nd(Vl-J157_E=ybDUyZ)DQRk2*f2;hpO}7PUQAk3{Rc>Am5u zFjOb9?xi6=+b5a|k+F`C>>BaKazvZeC9mNGAq4-Qb#-|6pD42i_3(iDUl+;??zY(i z&~;;JB;hh>y~aXOCc)vfT{MS{CV)y8eh z7*q6jkLwdLzwG&c!*kks3j!IFBUnsNn2K*W&IsOf`O*mikHV;kGP2?GSz5>z_mElm z#go$)Ka+ym(eKdqbsF?r9z8`Ux3bjxZRecDYorEhBV2X$P-dMD4zMm6FJDfM60455 zSgkNqCvq03g3h@hd=ANf-q#J z`CKJHN2f7>NOW)3DtDma$hW&0eAMv!)r^y<9!Bgz^-CT5J`$7b1l3XmjjTmA15?(V6}MP*r9!aH*;e5_(RJnMny$`14&b6ISQ zTG<~prUZ9mc;t22TJ_8m7uh5h5&Nl)kIiEhLq9{JO$fM+9l=x~pd`QQe+3>e+|-n^ zMDpjx=lw0mu#jY(JbYC0p+d5Gs5K!=;g6m@`;e8gQ$^FWYs4nM14l*}^0aXMYl$3S z@=L9WGxqOz*sOVb^W4R;>pyCPWuzvO~ z6>Z~$Gy6BsX^;7kg1gX1#KxxS6uXh`ib*HBY($)@1d5aCZo!bIzXqp&_FnyKe=Wo_ 
zN-^ZFqhb@9ml;y*3Jnaqf}v0m2wNiLncWb5_rJ3ea3uLmI@;?*!Pd)$a_MOc#vXU{ z{HgMAk0-KK@Pe3CR1p&c763+}$+r)RwPYJ<(f4)Giw*69hvV-+!#0DhtR;JHLrj!O zy+skaW0sXv|0UL9qZXNDJUN~znPVGa;~&AUva4WjxrX@$^mr=aK;gQ+3N zmlK1yU``$?61-ji?;Z1k9TVryP9HxZ=vTRO;YPt-Sp`=7uGv}xF$@W9h?hDUjqE;n zsUszqc{zwu;egQX5V_Wqk|D=mC%aKmPA`yi2r=Z@zWvLK;k7~hK@Zsolq{`pD3L$h z<-V|d?&Y72c}@WAaje0x!fWceMq`~ChA>}%#IR;YCFCLyIaU+mx{UEhUEJbsH~M66 z80Ss^cgZF^JCmR{8u{AagvlNGyh&Nt`bSJ&*_}yT>4ZUgm3wo%{oAG* z*kCuV#lBAGJmm$q?_)`zl%zs^avRtVNat(oCMcCQ}t*EW$6w} z+xpz?Ulg`+k`=Bi1;r(BA-h-CET{%45V!aX_?+WTVh<=BOg9Zh<$RB8oTeJ$IOxd! z!eb;P--%JZtgW$9O7MK;1dhWhuM?&3Yn4h!H+p1?Fx7F1>4_GM$oLk2r|#n@~n72#3;blcj^-y>V4jF`3^vmR;;rGyf039Jjg zg1vlcNK!|RtH1-f#y)X=lz_o`A=gceWRgW!4xLcfeDb7wmE7F&75;OHj2Ls=_h%#q zns})gi#xz07g&0PkPuu0;pKY&iQ@kgDWSz}E6g@BzxWg-dzF68Qe|;6nl6uz#Cdfu zT`sG3uPdhNRT_AoYm$^nIf!i@mHX9C5q;;DyA*qNySj;m^9g=;AS!v48TewNV{*LA zibcgw2mmzURxSG?=TGoRbb>lN#+&%@vVhhh8^N3|^&9Oz)79xf7;bmQN~~#s+oe=w z)hxI2X6>|sgRH~_oUZglB0?%obbSPpn>DF1#xNxb{?F1VFT)0JNHoZjBQ|^bJWBnN z=Muy(Kj8TST{a!u+t!z4q|N6sFtLSX#=hn+c>ykPAr}@APGrAe2T^7WyA^LEwg*o~ zY8}+7?ZzWAJPcO~zAy^697XM3pztk*lcQ6PhLta!?fG+9z_(-ZxV5-)0na^t_6w;HR9UIAz@3dVhK4gRy;jY3CLWZ$FZ%n0Qx^UhbzGs2 z$$ej6z#`oiOZ|MBal7N~&iBia?s2Vl6~c}&ms{>w-Np4_wXj!tp>S|Rr~ID@ln27; z$B((c?@H+VoRVf56huREKF|wW@j=mWJotMEhk`+~ zBvy#n*5Yu8&JLeKw*2#c!P48qO%-*Ax%~t=cGYr-0Dp{sd_)!%$sHtCyyhi7VuqnO z1$txifXAlfV{vWns}{-A6mfik=1J6}LDbsDkYz}BOguYuBeT?m-7eBS7*!6Ap7KkD z0Ta+guqzTqfaEHbPr?G_4ZC)?jz>GgskVdshZn|ji=m>PDUoahd*;50gNm>+>t|R5F^T{@p*)KPT9mL5=6;TKr z@F4Gq_?t>2yblpJ;l~5~!-M$Y`}P0xp9MUQjU63pFVOZMdusgRVA5weI~ENW{@26Y zxNl)$k{f#BH;Fu7dW(;Ng`2jx2yP7Dmz1ApxC4UfwLM0HR9 z_g~%&H;UdUCAC{a<9uzDoKNj!f9;bOFS0s2tr-^>M#qlb-nVa`FMF*0IMsXL(-Tql zmh3#v)A;zViA8p&mS^V~Q_}~mh#p6llAd10SDvnS-z`rDIlHgVk#RvHM!YIkKs><89A2}K(-eM9wB{>V=oLmKl(>h9^q5M?32fjQ}Uc@NBX0Ytv-;_h7NTH^snV?5tCAB$gv@H$X(_2HY=QSlgFTFsmi%#%%EBycM@-A#Ab?|A(5X#m4TZcdj!=!S?PrVSUVloJ)F zI{JV6?YBY{UDGI0zoMNxKb)SZS)R@&lc%Ppj_*Azl-kllCbL^5Ws-8s?q 
z>#JkqTQFFu0UoQ4?5sc66MDRIE;e{LR+cOuaDda^8`djP6DqYnLg6x-m5I>7Y7ayS z)xaj??$^MBYxV&W z@*!^+e>w7Ja4^|C$2!Bp_WChM*%aEYkrnbVJ>ue+B#ti5)gnFF^TGXE!Bz3joS{qt*)70MFL0(oz!mT*k@Y+nFU;Q|Xz7h%1{_Yu zCHt+FtP$xqh*{Clmw!sGJiri0C8JO7Wp)jAg31T`J;XqoItdqRt+ma!fjf)2ITJ$# zlv>czfQ&h56#biD<#abS?TYdtJ&X3+m)~S$PsiV5iTx)C+%T@=C-yvh_Doj8?ObM3 zNoiG#Byp~z50J?NoW?^CeQEu`SXE&_mpJi1z!9wl&_Vc zqUSGOd^0~iI7$#K5H{OTQmc-9*I||S$7Fa$AqXCOwI}wnt~{HLYcLj%kL!H~ z{nTjvVC=b}Z!M>*S*5Sr-J^uc$4J{e1G9ed-B)PWZOBhDWvP@sbNv5O>xK$XojPT$ zLOe)2(8^;n_}Ov(*vV??*>_@ULwU9GO$vNKjkKx2F*tIQblxH-mSwXHgBn49w z^txu5x#}K&PD{f$iFJr$#~-lkd|1P2f*VMRRD$Z`vK`3tu7@%|w({SYTLC|}r6*tWSuw_rszgV)9I!+eDntAptp#9Q!7O-m~S zXwzzLelU^d#cTB{okl$DXl(xCs(PG0EBI!@f6tWmpcUV<7R;*ImcO1IrH$g%$PMuF zb{e`h;$ktx6mS;mMM0%HdqHQZ_4%4KpVY`4%K7MJB;>&P7Hzbi6<+H_a~SyYn^ie=+17_}J_dUUjLY|4kBlo(&CFuk zER~S`>4UH3C-%0tw@0QOLoDmy7nr3RqxLqp_d|Y3b8Sj1opb)vAL2fl(0$q4hfYLs z-VXt$h-`YOXn1jLh}3f@N5}Ddc(yfB$7>R`i3+Q7tCkfRA-M$C zuoXc9TAX|ctx_V)e$gdgz;Ej&fqtQ-PSU)1j}-kYLgj4Y=k^qx`RHy=2oIh0s>~d` zGs+ax?cz&nunaAQ%`-TP+aouh~ zGX=G^)zzh!qp2?r^>dp@Drp_pT0>~XdeMXR@uND)ewy)a80O03#)3;8p1_^S621q& z%5kz*0oUnvtMG)qWQ$Sy9TaQeJNO_Ccf1{RMW}1F_#Se@QV3NrM-MI^j!|m;Ni$%@=G+ z^`BRq>{?lj+BEXEN`88{^UEg$Or1rwbx2xp|7Y2^odk)Y`MsH)=e&J5TPJdR_UkACwXI36s!GhiEAE>{T5|%Nkv7) zW5Xe4?rK}BPi2Zbi%xp@jlI7tgI^zx!_IS3P}hwUL$SBDlycOQ>a0SzzzjI8&n%Wsyt zq~+BeMD5?cV+Yo`^Kv3B9=H>12VxaP%LikbCDB>emgB(Ptd(90s7TUHJq&xU-c8}A z_tot($crO-0zG9_LSsz7ve1j6Ohkmr-a;Ilj5r@=^B!}~a;WHX*$z-3WIk#w6rOar z%X;T=CR$M6Q~6&CeM5%jnuzFVuMOiW@*Tf7NZh<#37@;BHC;j03a$}R)U2epD!wv; z_u00X0T?TGmalSFYi(-w3MU;wL()-I_5!u|H439x4AxM+$c1F2o2CcdXN+D<=(W}v zoBv%>66;)Obop{>Ak^Sj*OIi<$aAk)nUNaO5ln1*ioe17h)~JOxvysj=UE@&qD`ZS zc7i;oJ+>HRXm&&_mL_-FAK5UqE0<$>!DiGd4dWCRGfcGC4Y6OW_$gmx9MD?ou{6K5 z;;zoSci)5shn6)xEeN;RufQdD=u6#T=7kW7iWFZ_qU}96SD?Hj?;pR+;^24$LCaNg zHHdLMU2#3%ciutNTs-U?A>0nqqJ$aRlQz{`bCVgS5ZrckE!Ub)5wHZ)*rL4e z0P*_7&5iH+dsnoxnC9lRA#lwWTKkBbNYgoQ(?J4q_F@$4QziAtg3wOxr0$z;IqNiT z2HdtW@?0^4qVV7TKzpOr@%e1P)nG?f`d?aBqKr>cR#uM8LG3NHqXd)pcci<1D@-oB 
z$6)P{5xi4dJd)W&lk!bBifbL(EA`B(!@b>cEX^oRHr|Wn3PGB@U$%hi{{HUz$ogX1 z0jHV>e>t`+)N9>C+C++v&&A(rG>y7UMf!XGQV;gE{GbBXLZ5S{Jx>n5ZEJ6>+CD9$ zneXWZn!;|Zg}5i1V-7-WQ0!-bu$y`2L~-qmoFoBsb!DTyzn-fS%+yDv&9`;pIbylGy4HXp4p|aI+Sh^;mld<$dto zlLR4krt}mRV-XM=nK7nb#r*fUwS&kHq4p76ph;_cglmFg-^@wkjk{=!HL*&bGd|_C`Q6HKXLsf{v~GQm)N8-Usg6d0~=-=QKMoiXB8 z{?|d@d8$r1@QDp7Vw6&QGrh9!?Triy*4tD80KJh9A4Obnkndg7!0)oa%l zY!tW6W_!kl?A@fUt|jfK*=FhscS*CsqT9@ z23AiI=>Z_7IAxwW}1B5#>V7MP%vm?I)QFXt{G>yXF$iQ;q z19{mW{NY3O_mj(j@FXa6C>h1v`{(rd%pe7>O$vDqX z7}n}|ck*t|y;HSAv+14-C7aE7vGQj_XI{M9$1@)ewuj~BM2HlU$#37^J3Lr_n3nUl zr}Ayc@`V-r!=FZ-UL0C(X=O)J>#tcF9K5gs-tfa?b@iRwx4*h_>`2>iQ@Ylw@juo? z;PFjr-kde@#2UJ%n~VF{!V=4oM8V=9LdQw)`K{%V)~0k5ElZ1Ky$ZO+$uyeb%f+GO z=@A|I&Q@Hn217~a_ zZ1)V{Rt7ER1&1Kdn69^X_&P!U@WCVzn`UyM0F}91oVv%FRdDk)?YY5c^}D#*Bkdbete~E z!2d_rSI0%Ub!{t+G$=J7QVP;2B_*Mhl%OD;2A$GF4=9b4(nv{zz|ccT4jqCt(%oI( z9z5s$o;c6>&);G0d+n9iy7t=pp7Fp-8VfaVbEHQAUrRJ z_S3ES#dm7|3KBxxhrVPbLtDdwdo{ZI7e?YORw5yy&FEj0Dwx_##~sH`bA5q|RIiGTy4!pPBXkGTFs zc@zemm&jVJD%f-k9h=aiC=qdazKKIf_>;@_tGFelJOdbR84 zwas$NF@^^Z)DR2nFVxki&kv`pi|QQoAPX{e!x!20M~KT)#OA6_=iVs~$T9u22^#95 zweRWSv_7vI=dL3OjYpjpw|AQ#|43i>3ck|^uD6sAMxPbaf5^cdZz9$wTwlk~ z?le=#=qd_#zTCX0jAmVnDRLngMKM%WbBdu*0eu`!{L1UJ#VE%uT#ND zU{;cI7fMPp5)#$#jEO~L_mnJ%C*`=StR||S`d5(#V_{(#E%#^G*xL{2=P+6)D9Gev zq`@o|>VIV_LK4J1^}w~rat5r|b32F3N1hrtP60KLV`5?wH0o9oO;kCW6GIgOwxU?o z_RPmWi|Ffnp6?IsXUsvnK0nB6_s*qXimi%1C80zbkCa<#`&xGFuCA=)==*$OL|Gns z$d+qPLX+%2bgIMSykCr!SXLXgdCDd1*9TEDKDO=_1eYbJ=ShCT5AqjD2dAwQb^@OV z#PF{L2uH7F&FClAA3T7XwU)CI3``{a$-u|S*Rzzai=SPyn7BbBhW~KKH{mcUA-Lqb z_Y`zsB6fWYU;qbu2+&7FyWv$0=?ca8$}3&!utwb|jO&;TIJULXveg*FG(RlRsS4W7 z1wL9U^^2Ku-wnAKHD{vHsd2XTwNY81+Zg;tmXMHOMI{ez^tKUoSR&Q~tH(NAp9!FA z0fg$l!laHpPVjuO=l+leLHRa{<|BZ8joBLN?p8vyB(_681PA{BjyGjc?Rh#jRpfrW z^VE?V$^TqU&CuLDh*=@V;BaH&#N5h39L@Mwrif?DSd<4_D=wctD((IFLTW`GtI>Fm zmjE2(G*}>_;CbEjE?Cc#S`8r_7LBMhNS0zbOM4Kmp%eN=YXGIfypK6rg4oQ4OJ8OQ zBxkf=6#AbU)UL4b)9ycqa@h@WCtgtE2ZLA%49w2>Q((IyG2%g!XHGmV$9=jtpqwj( 
zE&S+FPGaNo^0KUgf|QC1ap-N)k3C|i_*^wxjYIXDuf2;g@ca7uoHuI^sV(#+XG3X) zjRrCm0qjtTjaXNZyK~<#6C(w-4v}A|imvoVnvRr@y(RQ6PCXqJ3>M)0g>7AToJk!l z@Wj$G|50h#w$1Q!5J8tg?Vs-D?$VzgAM1K_=B_=k!#+ZaDR7GV&gNVcjD)^my*eieE?qn}i-fjvP& z8Jn^8mHN}9co)GH{s4tx`L8HrvUUjY^GgBvm%vAY^|N+4AmLYvh911`R6+Ll@4)V8 zG^*n=SU;6)eekCH;nGe1D2#g2wNPrlm6ZdTU`lTy@w1`^?!o7F`mehaeq|p+9;sFy76;Dv^)F8I=)Ypy+dVrisVK$kj+|2l zrS)WVE^GK=Ti>7C(@2al!fx@BRo=|9 z0+XSs7u#W;*-hS41h_1bpFRyK8O{o&#Xm36r*q$u$kVNUi5P-R6xM5&udc0)XPtL;8QU|1_t9d+BZMH#1j;{&KuGKBdCw#88$m{cr4Znx28+! zyH;6wa?4tx{2pWMSrRc)kS6Ndh7z2Wsp7%-!ahBGhUd|vm4$-yy#ezu>oaGA-PhB5 z%Wh;9a$Z?A3$5PhRUSevc1>fE9>+*7uK<|=^q#SG3YRm|w2cY3_3aP{cAgF{jYzik z!O7ynp%U< z!6>6Q5vmSoFCM3jabd!I58;w^K#86m&4y9(nzRw@Qcva0xtuPFUw(j3x+<)1O;p&< zS}uylEIxzX5tt<6edB1nAYVs@I{>$yDu4z~zijnMMXxHRyP0yjsAaHLUmy1@bPjVz z;Kid{vhSsW^CTgKVXj8Ux8LMVE`GFp<8Iq6jAe)`)Eo1X#S`(&NojE*)Z3Pg(Oc*7 zwIU0ut@eBMO+->I-ZsXpg;d)|vtIuGoL)7$UTlT6CR9>^%!XgGvMxHFhM(S~_22-8 zx7z+ah8@ze`dp{ZeVyM_w<%zTNoV(PSoG;UAIZhVA%t!UOhnY@7NUK+d;Lw%q;BAS zI2k3**_N-Bxaj_12%Isa=AhA+2u;*&rLiv4{jKswtSx2D>yQn*UvEg`ls?)+?`-_?l*g+yvle~@9xPua;u5-F|{?~ zO;6H_W!u&OFWu&Z-zPg^UZ=uOpFU}k?(sVo7~m?H5*jt}e>u@4C_r#Ixp`VWDd)0B z+x+!iX1y_Hu~TrGZ!&AACSM!Av7*xuqN=6k>L@O&QG~A#Eu?%D$1D=tJ9Z?7>|Nx& zw`;W=nj$_iRPTHb-I!N#2d*v;Tk7U2c>GN3whvheaZH>qfGannU*rtS-79C8@g&4* z$q&Rx=w}wPneJ+;h>ylFB#mr$g)>MbZy#LjiaREzguKwr$GZ_?ts$u=EPRe8K%biK zAPI@NEWAXkJCTRy&$dD?DVYVKyNc6U$zRAz+t+0SLG0JPyW{a4ggxRix7r%+X3=72 zM_s_xkV4NMQ*`<=Om$Bo;BZlv!!4j%CG0*Gqsc=~*Da?(up%&p2=o2|+IHH`ygbHMG^ z^C@#u`PSA}{aS=w0>J8gW2a-(#Z7Tqc*nx`Q;TSJNr@HYWZT0lTtJ$YZwrU_Z;xd4 zgwPKlxzKA}ub-b>_c%pMe6;*X3?Kyy% z1h?}rV24+}C9#uq%w!%7sr)SJjvVXw;_3f)C}iiaz!p1+DUr}G z!tfb>rX@ay`vNf*ZzgMCV-aH_@1G&OB33uQk57KQb;4$yRziBT-3pti-if<7R@SAc zSyNzL{taE_nC_EtM4pWymN^l(gbGYpY?}Bp@K5L8&y6#MNi=S)3V)o3w)5+`E|2ms z)2|~ghHQjbU7+b~6+EfMwg&9X-rr}$+Uz-F+btEQ&reqC$E*d;X!)XOkaoxGP<>S5 zjecL`>&5K=#q218n_5DH7n=sD3^*$+z z`bo}l3xMGK>S3q%#T6><(kEqPSmC_J?PVg4QGIE`GQTSPirGDi{_Ez$;39{^j_}DA 
z-7Gt8B9WA_SW*>vfpSCrM?%-N;b`Zl0{Dk>2c4Z8?ry^Yg`_V#EZa1i-d8#F5g4*d zDJyV;6A~Nf*LWB69k7u0Z`{X7Ej;&B2dY#|#opqwhQR4(*1IwV-^m-W)0u>mxCTo> zaR;ZI(KX?OHa5NM3anHYr-Q5$3AOzE_1JXR$nLEOIFLC(D3RQgJJog`UL1-2I|*`u zY>7yi@yeFrglVnY6eZ1NtLIWx%H~6-oqg|OnoE7pe7qn6I0v0{IHv{fvT1?Iabg60 z`8+yZ%W4M?Jso~!{P7o{OfBwtc9iP_=ikx63#O%_Bd7R_pML3}+vq{XB^IRKjq6<#l|7&D_CRfB7HuJ$$yWi2AWwy6QPE&>fE9^V_r&8UHBq^s}A}n)YY?qn8_Z zxnnkLWm;jau zY}=X@IPHbG6HAL>&?Bh}xq3DeeQbeKk$p|Le$`zBYTd?akshSs=C;CQGWb;AFSzw7 zw0V7OAMZ@5i=LZ5%CA{jBz0AZ<9MPPQC`C_3OG=a{-8K(;bd4tzK>{$hoUox4N)kU^>pqkd3l2{z5atXCV{<7; z7e0DVivY`f4l8nA860y;Lk3Wib6Qjs%!q+zvW%gFDjd}~1!m8q+RCo$=(8_Vs;C8> ziwsj7vRppIr&VIKgb)Pz`?>XJ+~4h?ww6L`20bF4u8dD*`=>$Rs$m zhZJdsnP&K&yx%1ic%p`GjJxmQP7S^D_;#*);Ixud0Hm#skjVO|;ab!BH87`K&$ zCObauT1!vixb^3I@j&uhs@+~!4lB(;1Q47VAtfaojkj}Sdv>^yiFKL#;`q%u8bZw; zH+cR%1^ZifryL98pLwvca2tEjlb}6i5>%9XCwN%w5TW&Inc^+RFPNWaOGDE5Vsp?w z-uRC$0L>|!TJX=&LYPjaVTJsYO2)8^7$Qi5U|*52?m>jZn<;r9`Enf}``E~xYMw9D zl(DhsY#RJJdJE9WnpcR&jB+yY&Unk3L}AA>JvvHcv8mPMQHmCfHkNp*s1Bpsa%`m# zBrr4Ds0Vid^8xw@B#LkS0(ho;F9%VW|8gUjR0>mK0^qX5!mAP&NGTPF$D&$|ail=< zkeDbkJi!cZvl-;oFW4?A5s!bA2gh(LL_inE9#n|AM?c>060lB)KUOlKL+Zd=#&|K- z;P&4O%sz7oaH}Bd6(2Hl*Zn#$t(!J(g?hdnAFS7GTul;%OvdtCez_1fqC}fKI#f)8 zJ53F7*^mKW+VLjDErjG&P=cmthX1yQx+gObc{L^B{K}R7B}|ypn@ZKP%nc*`3`vvZ zG#6*KPa7+N^vIejMS5Dq_BO#{e6+!4qSGE8Wkf+AYP83#KQ*^+iQ+92?tw|0>Fn62 z&{6G5u!xP{?*{fX_`OrBkH2M6Pado0pKm3gV7mVgDHvl8xVKg{j|*%FA72(T(Qa+{{pb6P8+KWni_Wr92jJ*i0<9|33D~ z<9I;1K2<8DP}J?(gYV{J;nR8x5_Qp;euYj9slVfR2;ffDm5$7vwNRGb7KaxC^I{4t z9QJaM9%FXqT~&n&(RZXpo`?ct__vuiYQXWZN2U-nC8XWT3EBE7ALh`hTj=n^BZ(+S z##SjRxJL5#0s~NElKMdu2dbAfG8Arnlc&7+yVf$jcp5nb(u>?&xf) z!1(xNCXM+=wAHMz<2nDn7{uq@tkP?#cFPXp5nheGlYOsGpEjJ$gEKK&O7buUT96&* zR`PGnpP}jeDr_06)^*wiA~UCm0eoDuosljc{34)m|JkGYKG`MQ6b$t4L=td9e`3M6Is|H1VO>Tb&_VJRi{V>%?e+s%g8!%_pSZ*`W<)obPs5wu&3 zz#LvGENnRR9`4<40*(a8Y?RsjPV;R^+J4&E2nzIiFB&c^=BR_5(FQvn^)wCfJ>hD@g7Fd zaPjI_fc67`OHI!QSBxK2V17;J%7X@Xz`jk|f|oasF7x%o+FC2h70>fktt}JEq|q)# 
z#I$1pQE-Ow3z(=?p7;aqm{U&rG($+g-04R330uhc+5uCzz0)w4O)?GHO z@lr$JO<4t0(9Gq+bgHAckMi(D3>iS3j&~YB=S4_MZKW;N$!(VoZCC{vH0HRrwuYLM zZR|FWB;qldoJ^I248(e~0oxWyXbE70v@{~#?tt;%VQL(8_{MYGve%t&DaGWL;O{;` zQ&;B}1=i&G@r@@mN!QwY8(ZLqG8iqSq$fuY$6@16%lNf=PcVzjUNo69o1Cu;oTDAK zgP5APHGfDTe)CS4+m^w^em||S#Yoxe#SXSaK8=L?lhaVq3JM!LL{sb9~M=F3+A00;=Hl5$lH$@=F>s!WRLYDzYuUlQ9dsm{wp#uwR zMZ?1Hl!{WJ4dG(nw62i7x(XB^tA;)s@88E8w(Wy?f=Mk0rJECcIN6=psQtejq~cb1 z=*-$$3uc96Ojzj8E^P~(WF|rF@@I&8N2a$Bv9r;V=cHotvGeP)_ZCv62&6QnP=Js* zF(Dc1x=1`sd{=(4)KC}KOZ-fQ;%FjRq`TM{GMVI;+8@C_JVA8G=FaC30oioJ9e3MD zW9KN;J+QvS!lz8p{}%DZF4@$1E#47(>f_p*&vau&mbj&0e{-S96qq4~a|an~O%tO9 zi?p&>tfB!MpsS(!P5kMzdSY=l)%3Nfq(<@&{xkdXaTPD({N*s~@&~F51FsZ8oWEYumb3in3?7D%zmP`LqJU% zucG4GX3c<&F7C34_{2wyk9Nty0V^?9D{qqe92fZShR3$*X=cX@yDWB(i9fX!uXbcM zp?R<{T5dXZzMD)C0Jb#LZ)>YP1ld`WhUEIQ1e~=g&&6rY7$QO}J(u~>a^~RmpNc@y z07c-rSe@*>s|ZcCKUb1cgDhzBZ)nAO+;0s!gN_rTXv4HAMWNHH>Pu240&-LK6@x0< zYS9{6MXa$v9&=}<5mQ(54Nh3btoj0J%$?opO|QkIYH|@dmXjT1)vEQ=p9DnMqotMQ zp-~Q}-Cr{BzA-=J^UIi=tP-*%Ff8_il1@_h_`1Wri=+G;N-nZh>oXLFCCzbZOe@TA z>)Z~lDniGxsa~EcrDm)Z;*wDpu$^XEY8xy+uWz^>7-UY_fn1kwEys^{U%6T2Udb61 zb*ET?0(iNNIMAyg(QOOLK}f7f!D6zt7Ix9wHIJCbhxOMd)@8H4uW8^q&r?3UNbz11 z$Gr@r8y3w19&g~-gmvySTp3@lc>u;qm%b7Ve^T0`m-6~5TAdcx(Y}Cc&Wmc~ibJl_@A&RwG zxUd&`5IpWt;Z<%<6Womu0kj};z8V6m9gU432MA=UwHg5Y@<$F82j4}you)7|6$eCH z_E){PXCd$T6sXYjag{fv8Q|veE>;Am%AulEJ7*`StUZP9#J^uA z&N-1$;t7>Aoe69(#YP&xKm$S}Dk3MFo9lg~3J{;>;zCOAv*{n_@)1LwVIXKcOrdMNJk0Tio6GL!&UB*eN&Wrt?`-R+h5cfhKt4Z6D4_^%;qFe1_M z4a&-~dQG3bVEl=qg<&UZflPe5+u@Zd5HfPS|DBS!K~}UXfSe2qs#m=@U-Vo-6Z(8u zQ7lEO5N>9Uhfm{tN~|KOh5OqiTkp-Ai7Ha(_MR|tLXmC1f(M#k`*fWi2;c>0G!59$ z!3b+@11)AFRg`_6;l_fK0NaMe-fL?1kaYBGn@Gt&MH{fzlLl<~Mjar2c_li2p@w4qt$CJ2Q=*bTZl!ay zZ|)ecB-NruyZ`N8@*t)eDLXQ}8tj~j%0|=2n~E7Pk3dP=Y?K1&cINVd)*s52}r(30Nwe0elvx()h`cnvlb^D*UHK5 zI%SF7&&8s)cI&U}-rK7sL_&}@B1mj7#(8gc@lcOW4Tv)c#(A#Doa`s4u>?SP7@I{< zlcA~=0O5aj?*Ivwp6k7>1R`K>5zrzWq<=O`d9NZD*jtO~{U4e!PK)U(Or%ZxP|ySX zS0+1*W-NpHW!69c68tX|gOo`Q=9^GIp!&0&4@SaJZ$Xh4YC&Lz2IW@4SEERvwp{%! 
zA6Uq@E1|(Mo?hWIAoA7UqKJY)h8ODBKvGl|CjbK$0L(q^NjhOeEj7FBA6}uZfh{e4 zP5ctyVhro-h|$127ZHhirxxVTf}{r-P?HHLK#h`;q|D(jy*-n)wB*Ri$=MVC%>UO~ zto+zCymstT>hwIXU;Q!Pz5n~;Cr>cnZ;azwTU&o33I(75Ma~}aGqG^qM_XI>*7r2M z8fK6ldDgoIDv zZ%Nz0SWtLWlwSbgzzISWg#=&!7x#+HqM?|71P}dNa7wBe zSFl_S-`}f9K|w-%d}&?XL}*tG3eNIj=ks#ciLq?-V;+TFzNNegk%xUfI2~tie>=yV;&cj30$H&L!d(lV*li=-#Z;8vTCPo>2jjya!np0oFDp8zz+r_U*73J(C zeD)n|PFIrfGV_MnDk%|yYqBl~1n@qFtX`faU;3g3ggJPD9>#lbWvoOm)@7Cxnnv@n z+|PZU^ZdN7-2Z(9Ss6w?19x3~islM9B0N2$u@9j~Ju+=U-eq4&I`i=)B_SbUi?aB6 zdfl&6aEk9Y*!*TE4{A0l)_S-8>pS)(IL8nY@Bc#}H--|Zt?sEF9+(qm_?~i;9`{!f zk?arF9v0N}Or7#$bvsV^4()@4A?dDYIqViE2v7yuY^*-a&d8`(6Fk&3r_57)Yc{O@ zV7-2M;PI`?%{yivqBJ(82m9M0eqgqvsHEii)Kc^e^H}LS-}mdISHEgD%h>OH(DN{- z>FC5v4&^YqF>3mrPY8T{f0q-yC~bn(kPPYDTWjt@I4iGf;= zK4;m0*`|VD%Xc>FdrsYqX))kKeoZv3?CJu%wr)Qkp1qB!95Be}CYs4AFdo`>ufdt3OuQC{I_Yv|sG5a6c}&h?d#y`vSmV5cot+ zMBIx&M$VZyL=)jlwy^B$#cM&Uxv_SYU8A!j&=#WO=J}I+;=Wn~J8#=3{sj+YVUaJp z(f>SCQ!$6%(kqSO91oAdj5TUX4-AtlMhuS^DIyQMr|r zw1aY-jsZKZM0DNbh`R-f9dOEvz)(Gzy(ym%d}6S@1B|v2C3^i zJw#m6f{x}doxRkggC5aeoD3r-w@#*&hDd#ke1B3?4iD)|SgBGexk!IAEUw(C2=VVo^LbAr9q@%|@TKJoZSoM$HA1ErRm4*MUqPK10jw=@<0%NS_B=xc+li-PEHW&yuURl`Px}p36{VhY4 zfR?3!g#iss7&T_8bzyw9rqE&b{xAX!?Bh|OYThS+I@;1%5Vast{V6BtF1pj3n_=K4 z)k_t9vN$6Z3mb6fm%0?S8Xz9W^K%OP^`Bv7T8I@Aa#+$}2$Xk5pYI+My8Xzc@UrFU zs~uQDH%7+|o$s1CKLzbiA!aMgCHTREeLPaYxypf=Xp`qCrCs}@E)`wf{5sAGXy|Vr zAGCAQ%&%n_E&KZ#;Tq~N4?_AS_})fUPzDyKcyNeJZ3;*JpfykyCLm5DNbpNa92&O$ zaI2;@VHp=FE(&FM(kp|2XW#vU9jKzBqYdZUBJR@Ogp|hAYuL83 zBK&mXHpzrAey}w-NuI=usaf=R*dH|{pXrTKK7cMJXn)*B!qk+xJ3+{}U!qvT9MH(t z5xwnvOjJz>k-j?^R`UD0b~p;}h3V-PkjFtEHh8dFoD6V6xpYx5PufKOKcp&qnorMN zfUv_Hjl4nQ_@Um*)JcHGvGB<$kJ&jZ&3i%_aQiwGDeX}BXB_+`Kx_XhjH>zIeKZi5 zZTDH4557aW_1#k>h{s1FD}yxF?VmW9iB$s;04f9P7`zXR%F7CH#+YEvW%iEy=7SN2AC@MsM3>G-0mDxKd$wWx5$0z$ zFwr-mCr3M8ZlBPNzx$tZI}h==+jl}(JNVPJfqg{(=bK*yY?CPOy^}PsD7O)3*SOtK zqMtZT_8FG*<veI9?Hs}?iL{s6y3;G#zGK^LH!ixXU^BVqe0`Vn?9K9?m8{#2d` zK?LA5{(g19;9%6v=-{UU@NE^}l#0W&^CB&mn-4rIvSI3= 
zwwr0{WaYJ1#4TO?87BQ_{r8)K&l?1E`j95Rdau@u!G zDY-qJ30I?3s*q_AVj&@Euk0n)$VRr6Nbj}&f%5+#5+7!Zq>lmBYb{9@%Xj|Nw-+g1 zq~w)o5qxyiZN?A!aPjtk>b1OHQK*CG=?_ffC4XTOx=Wqe?pxOphPxRkbCHV{C15Eo5 zB3F@93WJxXiT{~*HH!w%Dr3LrzB5vguL-m4N=F z+(GL9RMi&_T@{_Mj*c59uQTfh))}B<$IPJ{6JASq-rQWsU_~^dT;_^rWH1W>ZFDI3 zH>!>PKb3TabFviVHwD={9_3 zV)w^a{#$wEKzV9VIwVp){L4-BMWoEu035H_y?{@Qb{Nx`UB5NZyU_3_xOn&7rTo!p zl7u0YdgT{*pzHs5;E9@UVpc^O&FAG0Teb+Vb8*iPL&;;)1KSfkmeSb4a92FWle)m| zpVresLg+Sg^^c^ng&8<_Fos^tpKvVys~7*>_JhiMOI)j*ho>c1wt<=lo~E zsNxsQCT{y5r*V(wIG$0Rh_&Fm;z_x}n~KDGjAkm7Myd_T&uGH#bRLrW)O4rSsOL_7 zK^X@X&(9Lwwd9q2I-IKTfV}keNZ+#5f*@LHm7`Q5nYV`LIuJg^WUzlQk48jYK zyctpvcV})sGo_?Pvo;i!c)?k@f6q@FY%X*)ndMkdGMd}iyQTDO#q3B)XWTX(Lbdpa zlEdKjBNCeiMX!W|OWua{+;eR5`8L6AIT%1`y#MIwfhP$Wtk#<^7xuemNLKR77Y_RE!l{Mbe$T?1XaKR~P>&jK9yGym8$lZE$-P+*U zW-NEtiAN{Z6y_(($$HII4W5Ptk6@au8!bd0wAuoSe#rS`LpKA+&u0)H(^7R7Ll39K5NL_BsV@+W-B&YC3QdEcV|x$#n^r7&`=VgTjZcl z)LFFbW&6|GNE7jf$F7$xPNanKDZV=yS%7DthcKdk{(-pBB0Q)FIw_l%EJ`VE9T#wx zfPcQe@>dXlV~(8GSYV1#mY-jIP0!48qi^N{hiIqYznS_T24)50eR_;nzi_iz*wka+ zpaKsZw)^)ObbETJ=R(l&u~aq~U(zcJ{b6j+%;K%xV!NN*XEj!YKcW0Lzh}l7ckf+~ z6Hn#(*+cVDR3X#goE;K;d+R#ZK9q!f;&y9&(mMC(i3dLI7S`)$0{pv5CQHTpD1|0c0BgRW#-8UTVq*47 zOXr4|He+TN6kMeX4IXYQy*Yf{!i21O7uLckmzeviI(q)#80o$pgBnwzB0vkok>Mer zg#rB)s*?lgqHpu=rE<0!-^&-0@v;6#Mb`vxPx?1iBfQVs(MO8i3HjNngT88$PcLFJ z@;IqeJfj}M{hp<5G*KlqQf$llvzsQ^_FOo{sdcgMbT-TI*FGjbk0&O2m;R5^O5d#} z@BJh9J{3*v8b4e9O@qj;u_M=gra2SvdR`$wO=eWsW$4jDt4h;}Us1IOhTM>p^6od#xUccH`jth4l1lL5$rqlr<-m zsHZTPi2_|u@ad|sE55h|GLpqGY#O0SO)dAD>31kxw)B&-bF%$_7gY*4*2$ zrMh=$-O@ZeHdD9gmM~I5jId1bomcBBa_uuXE49ZgXAxH}b;VvtOm4iVn=B!)O2lnn zaErYAdt>nS>p0OI;kqwB>2h0Hr?fr&NYE#L5F-WpvWC@`CXTPidG2uST^!L-jxWA) zuYP}bNAPb=eX&E!D=d(ZlMs7OW=ukR+pG7nGRARL{Z#99`nv8wyhAA?b>c=Jk)fGv zkJ@!&T8LkNiDXnPIRh&_&9*H)3)>Y$9|r2OZx?z6iw~7Uwq9@LW*>hSMxvjj(!`6~ zJ$D?w0^cKUu@VfSQ!)=v(9PD5&z@JE8u^wlqo*o#X2e^(YIJA83f{~;Ujf&2zxKLc zH;-mh4X#uFL^0vS@CNWJhEJfW$d;SC{4hGIrT*eY8uvYlhRNF%&NIZEIwN?k0|>)m zIG*kAErBg@IE9tC<&O);WTWNMB+b2do}K!N{2( 
z@ylFqeFb3X%V&>b&tvB`U6I?!mcs^6cp7sK?TeOTs5aPs-@bSM-_meQ%=oKbMQEMvY+QAyGvkWLa^gk)(IO`OYZK9?-bN0fwYCZLU* zz~@Ydcxln^n!d~r=77{}CZfvEtT?!PkJbISPOX8<zzlP*r^g5I5%vD2vqZJrDap_p7%4Tq<2ZmTQK=}=#tQ`_(nFm zqfn5;E%aIO0BUlLp*svhMA|(^IjNt)$$qKdtGSo`QkuoXuhzSn%y8#SvpUt-C2TER zuX7%^%)}e(#Z?`PpePqW4uBhQB=9eH{7&F+l67(1zYauv<&Yu>&_|u>R%@+~TPmN^ zwK%?fwQ#@STi#=d*N@I75^R5MuuGyn+!Asiz6Z~19iC!u_-HNOT2eM!d^rfmAuRPB zxS%VVQDbxGX`h5*!{%4xy5qgXqgLL-)@-VPFejL$2(`aHd=!-dS)>}JM>purDmTZd_43u zr-any7nQ7>;^U{#ySz6_B0k6ZN2O~@`yVrk^x~ybQ}`WKF))lk41rSjqQgg|gj8MY zds?qY7*4iz2mA92(Bi7n2jXr&vB;+TSmx5PUWD4#a-gKV+@5pS*Zm5Cxvstw57u-d zt)-X{VvmWmu^f%r#Zl`9mOS?eiV^6t$ zi}K>x3`vYna1Cjc#!xo>1JI#Iw1WjWD4;T=B0+iX^t?h zZol6Wa>01RZ{2t%=gVXFjN;knJ~B3Fg0IIxb&=EXeRrN>Y_a4~yky0-54K;$8neEi z8XPPU@pPY2;cx6T@W>q-sKUVDto4&eQj&(xl!X+x*#zsJR_P_o;OM0(;E!4? zMF@t&P0oJ?exchBNgRDNM-j~^D28{sCdK0!Knp7nDtJ$Z#HKsp5)R(hPi}xUB>|@A zu~8;@#;u|3*;RE^$yP*_?8g_$*=kS$d4-^e$Vji0?8nkfGIv--NndsJKnV|y*5T`d zP+Rde<@Kg^y1M4G)YoxV`(g_r%fa8&+zcmkiZv_hsmTIrKD=WT|1oE{Xi{25$%t|DQRbmBfqbfUqW zybk-VM2%H4+Rtcr$Y1tQ`86Rd9vey&T z7)wqr!p5n}D;YZJm59whJNH4~_IDPC=7R=mVRFSEebL{GEKgM2g;K>cwGrr$pf}vm z-rP8|bF54P7XtfMr}{`La!s-CbS;OTw~RUTTE=H|i=B-;&r0U;&PvSgz9wAtA@QB` zH4Oun*tXp}J5qIkE?O_0Z=+L==H{2w9vnG5!(KbHqQ9|IvUjK#@E4uv`B3P({Ls+P zl$tzB=3lP>ihuh0DK9VCEHU|X$Y46o9Me;Ju|XmUC@0R1E$O(DXV2wnB8?=8Z>`~p z(`h)n2{!f&kX)zM$ViyUWrxI4`yNG69w8H2N(!DgTrcS5MZsxC{NPJLv$&VqS%gn& zU6a|s_LH;JpRJFzmIpS3a{boPOFdbj1ggI><@?=_XVY+xT%T_H$6VJU_uFO?$2P~~ zG2UkjtdsLzzM+OVfnCz?C0KKE%!l@`bdTak@3+4J7n!Q4>-e4di2Qg0Ynb^Vv8H*6 z`Li%FME}6kH-(!B_)-1#n#5U+BS+m-F(G!?X3I9M_V#+svW>_d8AJDa!0Sz-imf3E zO{)Q1#?v}4D!?CnWUb})+{31{Jf^uDdV%xcu0wO%^+JQ4z_4Q13l{bSs}N>=#g-U8 zHEyT3*!XA53ha=58&NLgu%IJz@;nxSsP zO#?cNC)kYiccq1Fu^OgY2^#8d2W}lo$04xM+!MfVM^3_HzAYjL|CTXwYA)>~*U_BR z;+L@W1=LP^dTaer7}%6Wk@X1K31S`rAv+lG$FnRYbjfqr2lmpS?LwDnYzcpL4^bxxcT z(fs$ftS_tx-qZa0fR0%E0n5OfHgLnCUi7tje@4*V>SQqRjO|&GJK1j5UQ1@iR~}DE z8T_?l9IVgcA*ttj^?|@DjTPN-eCEV3Estx+K01SqH{@}ARg8I62(OLJGKs>lWUpTV 
zVQHt@o-$zYu7zdy4Nsb~cJT5gTDdzP*>NSnY$#svgs}~`NJ;2=Ol)+TN_;>hDMzW$ zXIZ6>(yB^WcC!o-pKkhAa1*5Yh!8`_{N|!4rLo)?>yadS8|60d;l6N$FazTm3H5IR z&k#1Frt|Qd8(~K(`WS5D*e8{evy4sZ`aUhYyfqVC4Km-tuL;~FUtm4-weAMxp}P#Q zBm_)Ulv8w7D*1$lAB4;TOw|@R_zT4J*IKj2I2rlinh4W0@uWR;B#&-B&4AD-?@{aP z-(G5zf+{QzQM6#h?u3$iAwk zgDWw5wMekx!aBFfG?LPw?{?y(Ir?|bFyqE8CvN*^GQrD-xpbx-ON1yinQUkCAa+@kW#r-^xpv~-Nh&+ppWp1o`u zTbsjR`Vo+-67L_mck?DwDCy_~xx3N~e2*~b>#VkhS%z7Wvd7LkKCxp@jwdYcvVG60G!8P<^*Sb z>kYGPH1kDVrNA~7b=C zQGJ%P4aDDTj_?qlnfojn7)JP5OA?27R+5Bn>3Wz#Yq9m=Q^C>O(ok2L`}W@>rlt^LxWbW+@(1^?UeqM|WQ#stNRSVR5`&r5=OuouQHe zGRppS68zOZtftPSMn4Jb-vN&HDApM$YvA1Is8l_o{MvlShoXmwYrj6&$|`)2daXwu z`I~_xnHZM#TSR?fkwJimoQI??Fl0{yeX}5;Au=LE^!=tuM#p3Hd=6baou_@| zZWg}aUXKssjdqT0X1Bkix-0=eB52|6Uw$}oRez~|hmZMsox~&#A)d74|KsT_!>Zic zu1$w5y1S)Yx;v#)=@3CcT5{3dN=lccG)Q-YfP|EYbW3-A6Q6f~$Fcvqf4HXezOFIO zF{UfROEe`BxoDP`n6yFt?;TM*4}5rf%F%-!x)aA2$5J?-@WSr$`m&lw~IE9h9kq9gUj8RuCfn)=7= zFu0A|=_?|cet8JMbdBK%-9Yo0u$qjb%#*et2LW5FObJS48Ky-xrpeOF znDB4~M~t;wHPsgIE-a8_bX(XfF#}0oPu2^*4X>)9@ zLRL+2r)B>^sx$LydR8}&N{pak**B!#CDeSVJ9nJYm}zhp>U8XJNdYbNGZ! 
z|I$8=$wI6v6o0L1vo<$X_ zNfrD;Iw~w0vjPM$ygf{==J6;ZiwCmMOD{6eO4CDE#$V;zBZ9a7W{0xmKTG5I;fit6?vhBWwD8#2 z`w?@4FbLO8G<3aa*x*t;{UBG>kKGP-FZY&mDEhIBw7ZvxW(`h4B2NC@K86W#Iw{e6Lg<|ymqiUr)e>*rvvd-|vL@TpVfwPplp3Kq5rY`H zkhQQQ<`uLmfp68VvN6vv=cHhW)r3r;MYeDF0=@WZ*{3RfXZ6e?{n(w7^zpK#{w-hg z8C5SqxsCT|U}eN!0w5{3+3q9mj1J@Akd%x+QbjBw_{C6LI$p=opFTI6(aW}&iJiIH z8C4|{yTRqF$O@_a#t{_OAu1JUWa{TYs7iT5(qIh@{<<`d65}Fam3OZ;0L z8uwC*mFsb;eePn>mafkRYt0j*g@3gF3_H7Xp;ffpt0TPn=Xw6frwX6hNAKUiC~p>xfRa;&$PR>YPUn%oQQIyZ^Ziw1@fyPChu(AB8g2Gw+`R7E zWf;gLjf+7<{C^OAI)F~+B0$IcuW+g(B?HFl0r9#{yPc2lS{9L!okLUAe51IeR4(=e zT=}%b1_nceBdYSwG6GV(e-$E8)P;izrv#p}>A^av*m^vT2~S&tnwecu~; zO(oJv{=hw5mjWN?V~AB&{yIJ(JhfONn0XKRx>?O6QZ<+v0NwJiQ zml!b8&HLVu{U1kHj#0+$$6^_NmgH^ZI7$0#MNtq;f-~G|Tqd@Fkiq5!2bWuRSR8@- z&Q-^Nm>Xy9kCivU0Q8PP+_#O)`@&(M;jq11uWZxe{v<6`bde;x@^koM1AC9&JMlik zW%f?&*)>OZL5vpo<&{Z}cV>P^q#45PsJYmL8ai6REN;BTqr+0$zu~Xm+?kGko+R%e zTk>y25ce=%h{NZ(?fNo%N;Gj`1KE9Gl{>SK+X2=b-rg+N=$(^F6%^H_-#6IiCk}#m zC^TCn7(5~ibE-zsRRulZS~z)W)dt$%cWm@8d|-8#7P`J{@etWOklBr9bsibVnbpjc z&E3qfu5VdxQ`Y_Wj}wY{{<3~SNIN-&mV*H|LXk^BB}&>IiZslktntb?Fyjo5j&6X4 zUKF;eDNP{~A}=Mw8Of~~Pfi+f7OqZ*a7&}U<}YEw5Pa1_L(>Uw7Ql{z;V5nBO4S`C zvT}If9}n}PH~5O0j~#MtFznuQhqZqg3lmeu&UsO%2XBFy5_NQ1qt&qGZz|3J9Ot}7 z_0Z~d((hp6?*ac-%d{?>G~KCn4_(@*Q5;m#n0P&AH0~T<1^w#gK6gX@aZfdn?An{6 z_mhgZNP+y7*9a5(7w8Ac1G;2Pk-;#+&~m}hCxvGW4R%7tit1Z&^eG;o1hALTk@_vo zryrRT*P2V~@C1*Yjs*R1!rD#3dX_7C#%4_Zq{k>%Lwj)S<{F7w+Koah2~|5In`o=u z=UvOSZ8wJ%=X~D6XV_g*|GFSDXD5@{9x^%-xTeK1Z`06v9m{gAQ#?E_J9n(5&*Mea zAAN_F4mE;%J^o4H3cR#Na*U0XUN8N~u4X1N+d$;-K=TITN4bXF9!xDt(Y@Vn&$YCP zK1y^a1g=y#UvVf*+wvpfjhhb{pYMW&={M^im!BG{_8;KvyoQ9vyP99W*1z<4Cha8_ zQ0=>Db~Et}kcMeM!=1L=CAB>-t|*mpbE_9J^8<5@fg-9xIEJjNC032|r24B@C|_O4 ztP5zU#Yj*~oCb$RcdWDue=vas9pWz z$h+Z|H2$$cz}jekw4@v;*eJU_zu|1;n)xs!IePztz!m zQR}OS3P`>D^i>G2`PzKxXiB#%*8C?X;c<_Ri2%a$Qp(`?>n7s={iKx{>`F+iJmEAR@+(ewP(SX;!=_bBYShF3jDNv^5CK%i)}Uf=q91318g z4XXMt+Ijj@#p5K&7jqs5;&QT2W=d~s!GvT}m=*G}GKDK|JjzyIq<9aPXktUdZETIp 
zH@l}mveNwYv(4ZLpwj<|)mn!41=^m(&fHTPnwr6rljezU(bQ9uBljDL=ww4^h56z8 z`VbjehUE>Q6q-pR`8fIUN!dZc+iwf3zvR_%5(*liGBS=zIVh2J-!DyLtj-Ne&<%vp zbPiBFZQr*-%*(MW={YZ)+k#=< zd-HN$TsFuS*DOh~{WJ4I1CH}18D}C?T65e+}2|CpFB_Qu?tQ`Ln?jle}kH7m4 zE6-aIghv8X)>x?6alsIK+{Z2-ZTLa+zDf!d{LNJ2b_kY@Y5Dx%$EYSoWMr$6uncgL}k1g0W6( zp0Cw*L;60C-?!%K-FV#{jAIV<{9+M5v2xjG5MSKY5ZFX?6|;Yr)`4<6cmKlLcO;^w z9%E$wE@$OQ@N6>wPwOKxYt;^da;-I|S8sZ%PzlfTg;w}=|9odZodu0JJ@sOOyvXBoa*KSlngAFF#(y`{pz)Z zGr3=p-96!cie`uEj5Bckx-)xCagY_PCTG&FQ)eT?kgnZt$#Uos& z6?;x58*T0vL#~$^OJ{7sK?*-cT#7A;c81`Lwb3ERSFSJNj+n$wggQ%O3st zR=fni41zbytndz;1*Z2^Y~P|1X}snD_;PL-sNfgr0+%(9E0N!iUJQ#g z26q4lSDn{!e5`qiYaS;BU;8 zi^j^fkU93!lRYU2RpseLsROj&s{V1<4>lRbp4)XDouuMoR(S&(ROcF=gbbXxcvF)( z!Hj_f+}rAdTMj4F@ehu*;_$a?$VW?gX{okT#Aw!#&W8Hh>dO|Lx!MgDyK7tQ&6wKG z0%N>k5zN8`JWCm`uG>{(gxkfuDv94}nDUwhq+}Nk-Md`AoUOCUsV~3? zMSR7LlrD-m@;9!lW{ana&l`jGd-g!0ieONlF3}rksp6=d;3~#&WC~;B<4a zNqK5eb8F%ze8l;BXIPve^3JiafgnUt5d~PN&h9vQJ<{=61aWz10+#(q-~hoSn6-m$ zURush@&xR5g~(?zjbMqT=hWT9d67jpQ~BE=<X&FYr9GHzgb}Qd|R`!&;&557s3kgZ% z)SSa+LW5*Ff^Pa%@9`6Lw96=EK)$2%vgpOG`cyKA7)Vy%k;ms;oswT%?X-&j>d}lZ zWSjY}H42~bgn9o4OCVePotnn$*HNo^a0Lau)S|Sg25CFt;V1$9v*;8RDNfgC*!#=J z@O+g9;k38kdX`qmGRDdJDs<2|T^y}_xV$+p-W$b?blitqEVg%TTGdv0Xqu={%;GNJ zH6vk1v@;H>NjRvo(gYeg{$0liMrg@n6h|%L`h9dNf;zA!T;;j>HhS+K>A680Drs#I zXU>!~(e$~7k?F58^8Ryg?9Qcz5L4tr0`i+rPc=Cb@4B4ee^n>N{#b;L1DPBbsPp3qA9~e*N>iG}Iv( zTDkq7?eNAGoP;t4342L)=MxuoeeGhXc_}d0=80`kFizzw_>`N2u7>6)_rYlrV)a|erjGm{R5nfQ>WmLQ8TiY zsIy*183eDcW%9b(Bzd;UX1Vvh<|npu8#;g*8Wzk1f_Mou)zVl z?iQE3_BRv5)bS@EvBKQc=TT80<+Xk+=m4I0Ka@+_y|OogMNlccj?u9VN!0sUT&Wli z!I@;AqB>Mw#jYgUj!awg}}2^(bZh6u^`4hzZ6 zHb`UX>IhFqle0$ap!t7TaF{rxl%@?ZTI2}66h|DkWyxK3-T)plOcYpQqfu9_! 
zu-scLDEAE@N!C6QO?+iTa`$_7-y-s?j6#j+yj)1z?GTfq|4$ajN(;x+UT z8T_swWh_ZzYirfs*5Ee`!V_4u@l#42krCQ0m6sT&3DtSe6w1LOhjTiClR zffD=-b6Fz(@Es2@dJM#DQxxtgv1?akQC6e5^2L`;5c*M%RaeqxyP?hhw_9)4TSm>oEg;p5Ue@A+`C;az#-qzxy zR9MOb@+PH-{|Y!8C{Zb9!U zknp$Scu52~6{UFBX+zqe~|##~jkUZXYa?Kc$8aaYv?VGA#JzPBQ)^ z1dBZbYc(W9$jFUUF9zYo{<=?iFwNdJb}o;=28hMV1yp~nVf%og_)qcx( z`#eGYYi4TEk2ld?qvyu166lfgUxf>i29e36vGKA)0zW>Rb|i8IhbHefE`W>I;^eV6 z2?(TmdbxFVL<{C@C-s7ZL*HfOlxWNY;r|?L!YspZ^!Ce92{U2rXsm=|qDlG}v0xlr z!;LAe;+BhD7Km-P~5t_443wzB)i zV6FPT^SKNIA{F%@@s>L@!ro-OD~zHmF@g{r*c?zZk|`XnY=5oYgi4r`0q zydQJienel;eO?n10RuHkthTp#1DDftTbO9!)<%Wm4r4QJCpZm!ajGcByYKG9IRAV? z<}4lOA)2{FoICT9G`O@m^EiZs?EfSPO03y`paZl)3>Um{&m6SUTz!jyvwKy??$HbT z*~ybfL59>%cNh*#3c1bax(^du1@zRs>`Ix%;{5z*8VR`ys^PLK`Y=*bFT!Im<(w+4I#k5f3WI$eYP=pG@K=BoP-KawdOR+||%CjLay08*FQcjlF?vZzEP( zHXQ#MD}pM9=dP7(Wp5(~ihlAzQYIc5j`!l|vORi&>O(McGqwI2j2~D*8W&4XiEgh2 zs0PPn(bIS-aOi}YN0b%z{b4rBTxs_FuTxL_mD$CBWBbNde1J^JDNc5$rWWT;4im4L*sXDYSPkkF9d;0bGG?v5hcA}kq#b;MyGSi9m#zgR59 zUz*Mz*Z_)}Pb_^JpeQ>rKawlEzl={2iEmqtl25^A$L;yg1AXZV$VX$Vg3vL}XCi%HK3eHQdRE66Rbh_ig$C6(($TBX(#Gss&cCIM*v4KZj>Cx-^K)MYrs zC0gVFxZCR0R}#D;`hFS~CfJDT1VH^rSz4lKep-tsp$zfZxYaMjv7kvD3Bn>F3>@9J zOtf`TH;9iay|cr!qLWU~3|tT*R?-YDsF3#cM6AinTRB__|HJvdV4ZOUZ;ds)&&ZxqqIyF1DwzP^xr>E3KBvSZSOMbgR%QWqe*Doq7}m1jLML{EF3K1 zGRf)=yMfnCRtBh1(mt840Zx)xB+)<(LVYsGhL$baEl}4cPa;=vlS0ux>t~!q_gi&% zT*+Be3ASMMS%p&j6VGW(4VlpnsDdhO=Q^vSNENM?g)O^peet{l)70bij1-M`oFJ1I zcBp#QNl!;y($tqx^pwxo9tYI)XX0x=soxBh<&5%B_ve#1YY9Zev79FF5s+-Lh$&-x z&pZl>7-*TEvfcxu)S&1K0)1eEhbLRPBs-C`X>ZHv8d$=5yaCK(=&C zo{=%uy|Yj4!~gyTf$BlT;WwX1<%SL_E-n~UCovR9(I8Qi?*rXBYmS3|t*@FW2m0F5 ztNbqY1Zp2&+Zl5qz0-?Ux5ev6_eS0n&FW1wXsf zZ+7?RLsDg>*}A_T=o>``el|MpSCjs7?DyvX)Qnv4YWEd_1=xq;|4s`2qRyiR@6@b_t^Z(2d0Yj6r=L%YX&V4)9VC{f4q=9+Xs zk#Y;D{sw=S^pUzqx}Zm34p*{W73IuHLf;>_;Jm{%70sM;Qy3(1BfCPAN&9Y5 zt-u%RKnkdKZqm$saA0H|{8wNAXzhnJqHdA-Eqoo`-Mgxg!X!qG_Th2fo19cLt|j!R zbWh;wvtatI)zo)%$@ja_344}V=0@tjzz!U}=t1)CqGcsfKJli-MAP`|V|*Y~`f1qx 
z7x(%eyyBO7j6=F7o6GQ8{>-%?M&AyL0wiqzGhHuT5Yt*yD z69gnzPdwUNu`tD;<=#XyN>hcyEzZ$a(RL#qQqLy>0uz4i=f*Coy611Jw=uh=N{M)c zRD;_FFffm2iZYVgc@pVG7~+|Dlpz`Wu5JnuXTbu&VOO+tbEHmO)C!7`KixhVA#q#H5yX>38mVoIikYsmAxwIs)q2eF}XE7io z5~flQZAd<(_etKEJ{WSGbsy$4(kT9}Lm~VmpG}$p=8mToqbc`n7MC}KO!!=B2pNke zX?cSu>&Ki&FYYJ)>>mG(upjS!K=oM#{eSGKne8|*kU65rN*F$OMjKH#>hX+x!i*lY zA<;W(Z?62b!PQezq9Gimj8;GK<_oQ`AVrhS(BSx(gqJg1ps%5s-vcAr{&*LML{JWl zxQF0e_r*3eL+Fa<;y^zxn{RX=qV4q`=nkEa-8fbM0D44L1&YuIqTfZ(klE^NC#PE> zyVnA+JaWE)eIs(6LmQ0U_x3B$W9uu!Im8svWwV1(Z2fvCiZP=HeDU{2{`f5N<_9O$ zS9Y3ee_Ugq z@xP1uA^p2weUZX|ba9BJ>4K`0M2Azw@iSJs58bAEU&A=jE zjf$QYDCiHNQL>0zr~0st;7Pr|ZG6Y~f&0ycA7wTNMJyV{K+cbh(8?*}Uhiz_)S8D_ zM4oDo9|-%u>LFzWugo{sD^hv8AZuTL_p}bRarrq@st})Wg@Pxje1*kh1Ku2P4sBYL zXV^qA4Rh-*W%mz0R5G7OE?C+9s7FV-@WJ8RwC8Fa|3qYaffCBHtgJ6Gd1n(%d4%&E z;)QQ<+kv!40U8VBn20~#*r?P&@u%#6cQINS>>1nt`1Yx$Le~4P%0cYJrS{BQ>;K?G zyBmm^ldLCkr1CQjBb|_w8g4vWH1+uGsJjfibt`KcdUhlxMJJFTjxRGz&I+9~O|zIo zk8oNU98r;$eA8TGh=)T12Ue+|9CgZdyf;jXtg}ZuW`*EKWLFVE`L> z!m;>UuzcEkFr;V6f|`WXj$t8ymU|+y(8EMc_Xy+G_h}M!{*TCd17h9xsOyiYB~Muh zUEQ4WI)pyi5Jpy3^<`mpcJ0z)m?!Ijc~sh6?wL7FAU69_cR;&B97I~z5U*yIvI5qDI6jlQ+mdts?- zwQ2X&r*m?K4UdRo6k!P@JF)tgXmhn*VsBpVhJ~P&bGnSmrwZLkkCM%!*2=r~1|ExY zS{x0ea{lq<<)oN6V1n!(L;7EJgf-!MnRzUym|FeR&1~_IdDKfh%$DpJvj<>%e%0=8 zB;clD?ES^Gcw0HZ^=s82diPf34f-Ptka)wY@0b0pF5NkAuQblIhfDQd14|a4N6yEe zD`=qy(DTFRYNiWS7<2WWh;^QVH*KZ=w~_N1O$t+wS2|>^+Tv|)F*CG_Xlp0t8wwH= ztXr1oBr;@Y3`@UphO6P8Qa4o3LF#WBzT7$ffpUB*WhX@!8F@0JrcW;)Mk7drbu1}{ za&{?Uu1hmKTyM+ZWTSzBV;+*jM&J1^LEGej2zotURZ{jxe@6 z(StuMn|*Izce#uZenG?zk#s7;;exlY-J$M@!Cp|;jM^O zxUKM&H_M1(&c0RUp@_SGTnn4eFLvs=@lXF{0gE@{`Q5;CXCQhX8y{>(<`UC6+{R8+ zyYk|OO|&4r$YY*bWzH1|JnA-}c*zLhvSJsjvQxahZ~vy7?+5@~geML(C-$r@Eqzv4 zNY6M)An`grOlpVu=VoprBc&DnE0FccZB=2~@U~Y)n#47aPryRA*Au$Lj4o)oN3p5@79F2PmJsp$c^OS|L(q@u2`cjZf_L(TOn=lF z&o!C~B&%)5rRkD$cJF&a{vJchTL8uBn>Da4S!tm@&XD9qCT-=h^ZS4^XK{^aq7WU) z%~QK1%yi~JL*Lz7!C(s;qMJg-^LJQ=!(5^T3Ts6zeSLO>a`|}M1i2oaNET(7~#CaJjDy6{#jkdO>GlE2P2<*(- 
zsy-Wq!{b(rGn*_r{*&EOiFh^%pYh_#GgQuTb%1lfYuFz$JdPaz4G%|^g@T}~vt|MT zX=uikf~9OIm5@Zx;FaTB9$}#DI};SXTGVGbQC`(<|jOM}i_S z)d5-I&(0(YI&Nn?!-MXO5KanGgmeds2&Z8fAW)>$&C@$>fj-txh)k zYDJ71-L_;YW`Pd< zEJ=P)x!EKGF_?0szP6<0CGc=s??)4Ir$Hu2*YW%y3$wJ&%dgGsU@yn!l?HW1|^J6a1GVekuIX(tipm*x>A4bKiwN`b{wBQ-rDuhlM* zaU_X(Bq@b)M9D?l7EV+8g~IWrgl!JtD*P{yKcbKv8^GRmA~3f8&~<)wi87lZsp{}z znE5o9ZAeZ^g>!8GiX=PsAV4i8y1PMo&uM*>pi$@%sdA*{cqB4{Xle^+G4J9~`6wCB z#rn}bacc+MI#bZX%$HhM^k3wr)Qm~D4MO}=1%*cItrxur+&Sfrkq)wpKvQEllQVFazqQH4DM>T3n=j3eAQL}APf|GJhfqe0Bgd`8+qQ+$ z3~G&?R4!@v3QH<)X=MkB!_ZLCy~Vf0Wv;C^Y9fsBx?#L)D1Xn?&l%0VA}x@M;DD?3 zkR|o)DK+3O1DJr}*afJ5Q#>$75b#lo?ZlZUq3FA;jSew$DagVbN8Zt})ztD};gk5sbBYJcUD1m1 z85%XTwju~dprW81NtkOQXeKfwVtJ3Lbwu45cil-4vv;O736 z@EI$1(2hG)u1=UzNFD1N5o>go@;vA$_=x>}OFB*|vRwgaVP^+=NG=Ic89i+ENo{anGqZ$QfXY@X#NI(jscwEyDMrpb7*mtB zHJ*GU%u?K&&LcxM{GNI)YcaMKqoZ^F9;?+5qx*K1qVeH|oKKA+v>IN#w|xG-+Ia`X zZux}x;=UC^+Cc0%Zb;b&&e9LUDnwVH zSipmL%IEcsXbyS`OwU8d6vuR4{6oT4+3~GPplvzak!Dc_7jy; z(%XwW*!n&v@hA8BL<@WL8z+QMs9LI<56+UXNFa16ySG@9!(ug_AIw~y`sW_c@{t(7 z@n0F0n{XMBXHGxmyFtH3LN7-+zH&qe(Zj(#j_>qA#ltEr)$5!V{64ZKdO=gbo5 zP-V-xm`Eu+f~`C%9z-*4J6Cg}lR5*&Q6)(!6%=lsPw_Z}q1%q=IIGgw*sJ+S-?AuD zur;*!@F>FoA_P4&cBPm-~k7|&u9)3aUm4wEkV7f}XhV$nIqDyO! 
z-1SW=?!!@9wTKP7TM6S3jKt40cg|_uGcOHL!&2<}JT-=d4}#hVjnh1Eecc%oBx+O9 ziIIk7c30i$F}P)|J}6`6&gR{3x18nNoTecEavWnpN&po>%p?fWSf~;r0RXX zGD?Nis2G&!X1Z7Fr$=Q(%Pc8mTm$k*QOdlIV^!KHfw)iueytou%@U!7YWuEIpvE2> zI)994NaiQ45-*AacYwX)LDIpV88aj;Y_SmikSbb8D3)y%z>yNueWJ$3RX!WV`B;En z{(WKPbV$_6=~53xsF8fDJtNGdBQ;pIN7ziN<04h;%t(d!j^H`EA7=JAQE&phn7`~+ zLn`N$=qS+%*BkU(1Q+=aI>O1fzIRkNP>=vPyyF}d7V&QHs)%lQp; zF~|WKX#||$q*>$|eyv$G-{?(=G8DW{Vq~a`1Ru3o!u3rmHa22ngOcHQl^%5TeZ{Bv z)ugM&Ih$CWgKHWCxx_A;B$_PkJtz?AmzC-f1uTiJkO!-9(o;wY_9MMx9aI!#@2 z-%7qx=@eL*!UmECN0)AL#m+|Ya!tPm2)f#1!?jZ`fhAk3VY*+A)IDj&8c^-SkvUY8 z1yQtbG!AXig|~3e#KKOXc)G|We@`b+7mTpHi%%`=L(CQ^7vGMJNFOUb#T%KJI=Qu} zy$~QcaPBm|#X{(PL(@@AHpA<-+x(U*W?>1FK$M!K)UPfqg60;(bMM)}%bwF@gslRkoHX3R@Ib{no2GSJX6 z!4cbND5#-GItLRvufl75BnevWRluVUp%qsO8dSb@au^}JI#?8RbP){SCgP{-^QP+G zg2O1kfqX*pjZOfE=3)+@H>J1)s=E=gQ>>gnPV7N2L;zqrVKY%-J>&NR+&=c$UMJQW zcQ-DqH9P(#_a3~# z-F)yS17lYJmlFEkEe_w;v;Yy{b3Qd3eNzDn0mCW&Y@V(l_D^lyVi%m*?Jf2SMixm) zcO<~{O@l7Uw~WlJ!F;bnN7Pk68rUej43~Ok4taL!q8|S#WU?D>Ix!_*>iSi|T^Gmq z5h)G1XU<7hd2nqY9wcwBROdW7BbA;WdACoOxM5Ah!c6u+f=OphN5jKQkeh2fJj8)Q z9YCSvNh2smhTSEGGTFEt8mHT^wzpR7{#-un>dBpj9Uh<5+2689rc`_VmJV|9K{iLO zqEF^+FgbZta&y|hyqDF5R_F;G<7q$Z2fyIW0MZc2c2CR%s%Rwgw zAn#p3eIfAb&KMf=D50+cR}(X68p8@r2R!{oe|sqcsvgQ{wrRTQ^c7byN;S2&G!_V$81Q3y&NC@QqDlGGA*G?_G7c-r z=r~dACffhRThC$Tft^n<4tEzpIJ{r}GZ~BTC~_()vUEZ9e3csq-fpGFSG2vZx^umq z(tN{Oy2@fl#z$}-?sVY1vs}Z76Z$8e$oib`=hy6L|9G^)h*PQQjW&ULccGsPK7oiq z%Uw4mzEzIfVO4)0Ua&SZ%`~x) zQd+PscOL*J2fDm{l zqMQ1;=!T#u@!-J7KWEi?RA`#NnqEEs3xNyR;*PKJf6`_ z&TS|lqTeYAzdQ}?U&3%I`7y{bENQHoSEJhCOv5Unp@b9ICW@7g70|dX0UCO!SbOC& zO$6vV$+s_&w8%fDG<5Ja4+%POqp+WbC*i5p|q-oi_k#7&DeVMk={h`Jy;o>A0i6)PUoVqNN+>ge!iWAJsH=clONL1bjAlp~jq zeyTCHy#}$cL#jtLg_-H;oVq$Z&`+BqASz34#KR^^Oq|9v2p+^HAt1!FEHkTiB^XJJ zvf2nKX5`R?U)vp~sZZBOk1SJmk~MMD;byHLH7ieVY1D6ya?woqNEUEpYTYaE1^&hn z>ve`t+-8(!aw=L8Hqs&nFsIM}wFtWDVrL_k=3;H~%e{9g~>s5B8 zNV8UZ^=W4a-od+9rp5}=3R_&HI$W76QI4ihz?aR&1X)j@S`K*Aj6peC{S1SGvSYh1 
zi0>V3?^W&uMo4IzMUiXcC?uE1)Xemk^*G%p(uvF-$oen5DW+IW`c=wOxJGW=Acqex zygD8~=(~I2<+B-gF`lNF&1|5a9UO6vig}e-&X zXNs=&LY7W)lY@BRwt!JM^bjOA{)HgFXJ_24do~l`%@%*UFfrNP0GXiD#Op{j4dc9w ziQ!>&BaPV4rO!6jbz`4x)JSc}q@^dLd_B?WUj3NolbC{^+uEuv==6q~p9RsX5V5#& zCUdhAH~>qHFjyX$iiZJpKFx`dWvbLgFwwG*T6-;ELY*=g$rDfn2Iy~l;6$+S zzi3FWsGF!EyfZv^fhB{PpC+iK9nXzv^=#Gp&L zRipHJciysfDr`q`!yo2z(Na$&Djl=R$R-L5Y?P1@-VeK+Km2p(hPASi zf=&51HhgO9c*Od&%Jhdmnb5T;@Cn1mL!rUMw8EwbTYUO2rr=HqF+1tMErDxLMm3&X zGlQesY3fzR?=wUnwS(S-`fM}T@sx?RHsq)1KPa|=0G_(5c@Y$e{Gn$hITjP6e4r2` zJt62na|p{_q5y~jhuP2oKiKk&-!g_^~af(8S>w)q{4VAlBH`OlE<`g+ag_{R1MU6bbQ zccsipxH@SI_J&2Qub=)i)MHs1uqp(X;YWIIgnB;0LfPwyP3vL?Mby~Zsnvw^EdP@l zxDEHRq{T{|ZyL#&L31#aZVxMo0I{{~e=66p>hokARgr6~B)#*^0KGebrhaeMnyx5$K|a{!P8zD=fU$K~hhH)ELv<_6*WaW~Zi>wO!_uSUy-;dao$cJj{^F-E zhslP}&I=6mBZ=^i7U!n?1%q<5=%fwb#DkkA8ttZR+pucqv|lN6gH%sO&@^c0%M*{K zW@Klolmc}d?hmMQuA^wcIMfo3@Mh`o10AlWa{$veEfR&cks-X)Mmm9<;_)#3OZ2jV zHZRoZpw;10X1pXkS_XdPk{ufbRaM=mQ>Uf%DL$bBEvmM5Dn2DWA~2B5*D7EAn%Sru zha^RrM_ZQKmN{}dw}_rf&PY(*Bp0d!Z?Vz5dm^P>vNZS#k8rD)ybsaFX)qj3R#jJo znHeqVRyY6G2#I0ISDBYK6wJ<_)HCqI&OF2mz97)B{UlAm=#X;pq4gQNgZYN`dgYH7 zj4?Z*)7@U>5$G_ytMMjgziWgW3H{K0mV0P+ZTN2VGf2R_aqYowp-f084*i&cP!Ni| zGJWw~;B4=WVS6j3IP{!J?!qB1>wnp)K2GpCZ~$4W&*DKL7Q*J=*Z-zkpYn;3-yw^54j zP{@!gxF5VoBT~=K$Om3G-XH-mf8yBMr1|+f1G9GLkIK>|&UviEBNCEcaL;PBraK!O zDxUEw>FB1EN$WC%-t^245dT13QBaAIb9dmb^R$_G^PF0!6MpiZbp7xPhBTX*V^v3u zT`-3^?Hv{=g}2c#@U4U5h@p*b+6j^a(?jnph9*=@CNhHpXuje{`;^2(+e|Fa3-H6 zFJ<+_8*`Yde{0}8EH?a|i+uzTp4QI7X@_+SA{QUirPJq_Z z)6*-V2j(cNq+_NSRl9ScDta`!QNJyJwI&W(*@zK(bkZIjCp(9ZaQq(7?sRkmPgdcy zm6G0Sd`yX*`~^u|{{jeNQu>n;rj1~I{o)K2m#G9|;z$V&LWD3IEPT@7f&#~N%jZ-a zJg5l)Sx4tJ@>LxzBqSi`N%edV%_)~8_@e?7RiZ>SJqpIco#1CxA-?kI`MTQLI zJ(48WZpN!HAly5p%jE5AT27kxO6b#!*%x0+ly}6d3zFCKu)ir9Y=`21a?`oL^-m|^ zQ-)!KbyvtCtV3+tC7S@tY9Q!Fyf1Wznf^)J(FpfYoi^tyh?4zi8Mb1QC9zD8{De}R z8F`_A%u&>L;Fnjd1*>eFTQ{A2PN|#+AVs(eDZE89yfkne?9r7*loA*uonDcA)clx` znix!1NXM$G16Ab-?jtT4{H`i5>F~S`2Wu}ZmpyrMGfyTxEOHgBYlORj=J#0B#X3sO 
zQS_|IrInpY&D_&E1i9r@xI2o};gAnC!*Tyf%<>tX|qIN*}wixEP2&UL3ig#c1%?~!41U#XH zP<=jFi`%wX4fy45iQ0?p$xEwf6LHu@yKgJ;r|kRl-zZ$ZT=bkNE>&Bq`uTA6L3+Hz z!zd2#$?t_kBaefA_x4Iw0-T?aYi1XJl;Q+QjP!CWpaN`_m|4Ff?!nI=jp&0eeZ0vmOW&!Jv!#e2@vZ+SF6V+H3qss&Rn z2C+@aoA1^==${^tl|T3h$QiW8N-kwfXOF4nzPoJL<~b2nxyz#@s2~pP%f{>$DEr1~ zF^#M*`c1q&bs9dOZJ}p1ANfbeR6f!CrK}3k9EDc7aTjE7mh=6({FRTk{AisgW&Ujk zO1kj@=jgs*^x@;P&A7jjJ3+#x5@8HKI@Yv^?0yX;Si9*S=?U^ zTY-H}NTmLOTiUc7sTU;E7+Jv1_xUrkwr*lxodBf{FdI{na+CxObivY6!-$D!bF2`7 zCaL?TwMHe&nOiHlj8V$dfiLf_4V!E~s0ynPR5TKoJ}J$>v!ChIAD$hNY+}_HOnZ~d zD2O45nxK$KUE)iIy?YpW1oNT(oq&viI8866@n83&DS@XJJu_H}aP&*P_U5uv@^`=3 zOC+mbDSlGY1OWjJ4-1;&&``K?xV)WD#tknu7p$UUvtU$Ak2!H@ zvjiomW5tF%#B!{CKsoM}B$rRzr%xHT^$g=iL?=F&>tJtcd5 zTc!WU)K^DE6?S_|=M3E?(%m2}t#mVVcQ;5#*U%kGN_Tgs0s;ckIT9j`bbUwP``+*F zKh9#og2kHIv(K;gc`(#63877lRG)|LUePh#JX@czZhr1)sD6f8)3PTbZad)-h1kQ6 z`=ei|MRj)i!J~z3CccCtT{f&RtQ=ok1CH<8SlZL}6rjB@TEDj1ouswy);(G8SWlDx zR()UFERdQ?>BRgd*Fk`kHw{(R@a@nes_*5gE*Xy7*p_!*M0mo`lI zz166D!i|{XMyH`zH6pX6L;cgk7&S<<%S+0vc`R?NUVIJYyinaD_P#>24A?r8&StRf z!X|agT8wV1SwG@|qdyvk#rgDk@sxZ5)rGF;jlkaO~?& z>?RTuHNG>q*}z_ef;x<|Oe(cjP<99NoHk)mPYZsG70J(Lpx4Z1Wl8jNr||kTjh1aR z>n`ep7tN)nYa(mP(Ln@)HZ|-P_o?!9rIqJu8d^gV&ec_@Y@U{@F}uvX+64LO8O6$J z1?(lCxzb9%rF`q(wUxhDf8I_WKq$-KA!t8*NEIqMpYye`*xPh#%dQ}jfQDHHx~$*XJ5U*(LA}!LFF8dgHn0c=VHS$amd7< z;KIxgjkF%}Ro?!m_7fQ3V#D7vK7Rbj?4+J&);S3GY5hf-?4U%wdbPy`b1wUC9b^Lp zWh2BR_rh3JCVMbK2?=Dtk_ZpMWmQIiyrR+m zX<^hiYxMh?+)ZD`r@tG792|s>u{`03Th>Sz?I^zvcrQK3?*&qFfmoon>ti?Y53FgU z>*&J`up=d-i7&4VM#)e_A$q(Gn}|YYtz)!Ldc0)t@pyPSLE$c%uGo%tF=7;YhVX5$ zOn7P}GkDK*wGPVZeclWac@&zU4B(;%3z1ffGLf`kav0Pzw(reO5AH$a;jY=4_Brh9$>e`f_X^N4e;VrDTFXcu{IY(L`|thn zehC-*e0hL8yjEOVZrh6pg}vp}sJPKF+=sOfSquwa8>FZ-Z&NcC-cV0|w`<3nsv(q#`(>%@ zXK@9zIC(DhO-<4FIbWtdDQ!1w0-3BX6gNXUVX>8-ic~fh!JzYP1V*L_Y1cq{6;d6Y zpBJ`*`Z^j}39?>FbNcNCE%JmofC!2sszoi@h3xnU~Ap z*Z?m)lwnxjMy$-q@kmaNLY;9*#PYTT17-;bTR~(v@I$r`@^ddGG-5eH-m3yp z`hGKc;tR?V&Ez}P!f|rKM$7uJyraF4(Tb2RezG0QPBqO2vaGBPb6874ec-s?qN@pL 
zIrj3jL#ytWys1Ds^}g22!`VJ(+lY;FNybBI#&0z=4&hTv@V%7;^)EY}+aiX6UK{SUB~tkU#s+xLPw0^vIaRux(^qzrYOSp z(&L87Dy_EL{YJO90Ds$JK*`}z3kU#8#vW~;z)-IGQ%y5W?#5V_9*~IFPRJFhT$!vA zdT<*@4dMiHyHUV7f|n|4$uCAEC3_LjOde{}5MzYFp+ohVKWi*pRUi|JqR`o$VnNQ` ze8~A$U8a%3IGxFA{8cwXhH)M1L@?nZrwq--{6wk!@(i-qMm$J`@vPL$)rS z*<0LA11J1-!83z3GYMs*NcK`Zb}S0{%@tcMq|@5Q!5tNnP5hdL)%D34K|it=^G90} zkQU!K3iMTKzcJK6uPh;PawOBU945cgKIQ)IYg5L)Yx&upxnTy9Hii7vziAHtOq#nG zYDt$>R&-<`Q)a1g;A|lAGVA*=(tQ?|%Z=hMGkH>-R7za=tvadLpKDSCy ziGK_G9neq(&mU$)2_A)|10r)S!H29#u(?g1rMXFW?eKHys zqkP{|jpLO=qTESflmUVZ84eVv^&w$kBtL&edhYV(BY!cpoYwQz!3i8RYsC*9M(b_O zAX<4%jm=AgZ!+3iHly8vl_ee+f21!y(BW(}a=?+>pML3VaLW8 zz{H!s-^@N}9ReyYr;J-B<^5H$)L?aUsD$dr7j?^4_hnqQ-J7rHdGlRWrWp3c$ODLj zBPUkiBqxes)@iV*fXICA$lCO<&z;M5|CV^=ggmxw2Q#+gNK=u_3%ncG3lFtgjcte3 z49)fLZ6C)f)FpmZPB;jSEHQ|)CR`0C%D@~wf7fcVzW=+A{XZs|hFdcV9?u9uxYPK$Olf&^=fLQUT>R@x}S<+NkB>Nc7=RCTKQBX zkJ*1!)y|AQH!Jtxy(Fz+pOQ4R1mj;?+Eg;hVz_H~QrnHYt8$FP0^-OjW2RNL#uU_N zdv=C(8ChPPew@4~8)vLn6ipa!P&Ld-s5ln^C+{d}Ik*r3w{W1!^#7rjDVbJv4`Z@; zy8Qb0NlY4BYmUWj&qdu`KY1Vhq;qc%z`4BZiLIY|5}sP%|zy)k}V|Lss}df+}& zS9pSmUsh>r4mgZnotS-5RK>XXcWAO!9zgZ*!Wh(_7`WvLx zsK~CjvJb0giIKud@I#sr*`+aA!sVErM64y0BNI#}{-}RGg~J=YlIy6=&&cZch0(GE1IIIz{4FTC6Hyg4ZLtMfQ;%oIOT_CMQ|uL=kx$!+%| zPLJ<_!#jSBg@=6AKcSXkwmYW4S@7IG*7m5WNb((HHX<*woaVmn)}7Pxv>o#Lm{~;V zLc6QWE0OjDNeIL7r`R+F`t7y7li?f{_P*KcgwYfQo;v@L z>de=tBC*|2c{St2kl{UZhv#+R22W29>c(!AH4gs*%;Bck@v|S$>&9#2)X#*tcWdun zz&_O2y!d>b8M-Z9hyg%L;Go^VfXG6aN2xQPE3N+~&4X(U5t`*!1?y|$iM|30$zoEZ za>s|^B!u1A|1zcE>Foh~&=)$8d9j&NNy0G{a*4k(B7^4#!Fy3O5`v+){0St1krXkc zBnLUno36wrcGYqLy6o9#fB3b|!khytG2xiaDX?7M95rO-JK0C9S%xotmUUNRVvfPe z@7jN1$9%44tTlE|Dlm^PF{=*pb;QZ8JfBh5k4^8uZQ|3;Z*RembLiY%#s$Zs;b#z8 zyF&~Nm4E#dF$&E7KK$#a8=aq_VG^`u*A7wUo!#F)E1>)bXnV{NpJtb2)L^+{5OjKUR34biJ!Mp z1C^E21Va0<55Wv48da(lFLjz+fu4kFF%zJS0K1k#pOBGr+4=jo>iUiv`bYf;JF!9U z6EW|nDKFg_1`rc|H6zBXFsUi#zea*el&H@grLtS?ipJs{3S)^XV7(%ArP*%m+nXDJ z@eIOvB@GP`msEbMHr(3(thvXQ5$=)n+yc+xKZFe&{XU(|_I5Ejv1Y7Xje5V7V8gf? 
z*3sozCQ^s2o3kN%+=7)~JsGTF2ui7ldFxN&t#PcVtisXSf>-c{O!Zi~x7z7W{ zVrF+ri|X-dm2Zxs5{3fluvJwMWXx;U2@XuGPO>H z*{1pC@_Skk*;|p=yV2vA_E#YKF@O1t17m0$#W>Xk%}`B$zD8ktIkLPG&+@XJuU=-5 ztF9Wo9Sg=ZL?f)@s{wCAu#S;&M9G0auhx#cDyy|qHHY=ePNU=bw4P4AJN32j9NdF?| zY;{>=A^2WZ_w>;WSd9w;M?WuJbXO@2;`{sN^FwbSzepg(EhF>{vXTwX1M)>ZX&^pJMEKladh$dtmqq8QbKnVayTI-Lq!6?UnKrv6yw2(3$alP z=(Sp)lmZe`EsCG>;bl{6;RjU|HjIm**{f7Z-Ou{?1P)RfUnEi=Z`c}Ix*`eW{$$27%@16G?g|+!ZkUJOjJt>wb3hYD(gm;%0{3Y~T z{{xy1Y>g=0WNP{P^cLfKt9vhfb281{WvXeV-eUbt)q5LUBmV~{kv1Dzt44RWk3I;; zd9GwujpjH}^qitjeZHBznRj%?!Lh=A=KGVS`Olso`VA*SorFltkuQhIIM_oK{^GTb zJVef?(U`aoYx%FU-mp?i@qS{aH<{Qi)^tq`-Odew`jtelEWKEk$j{7r!*NM|95IX- ziAH$+VnoDJ@wd>8S2TpUAczbuFYZ$gDE2VZgWT?vN=bV9OFT8`2gM<2=%GJO6GP#t zFbMTMTU7Yc%W>p|Z}Dv&$F!YJq(Psoq}TW%*}v5|SR%d^1`k%rXlQY%plguWz>gHq zBiYQfzHxL6U8<4MR{$MU#7uwJ1j{tCBE@1AcG-yAS>D>@!|BM1#$3!mw1Rd?<#CCM znhP*5HPf?!HGO%y%8Qq^wQrPkDL0I@vqKNum_0&a zX2SZn1?TAg(JK3y?pMv4XOZSM^TPE-Wk|J-j04}Vz09{U^b^nKkYfX^@Cdt_F=`)F zqpFc6%QRLt!P2iyO78zZj4d!udoj}M2LM?&KR=XLNsIsS3KE11V+{V`xN{jM82DZA)E2yi$`6!<`*R}$skToPvRLvuNq*9`Ow z8W+EKx!|^QkMk|+5uw4fLMexxiqr9#3$fCD;rX+up@pQ?e7A`@dqpiKL>1`Xh2=sh z1iCqA7uU}DKR4gEu#;)|Ets7UscdylX`9Sm>8NS;P?B0wDxUlA+h6sWmdQ@r2aU2u zR=9~XUj>GqQ{^4T?7}qYteMjNR#&?1jK8#L?f%7 zVD7t5p=#b&BR>m>Oph1sIT>Kwic~*PQc$i(hJw4f9EXN|4wJsz96pYo9emC5xo@ZQ z@th}T&3eyLlF*5K%|;m^cT5(k3#+kQxgLHvTiAa3)oSu%41=p>Lzi3;kCRe7pJU|e z1X>`F7rfQ_@tZ$##HtVTo$<9!Y*!v%F)StsG8a`!Y#Y@vLU9(D1 zq^J*DgbYt0LdzZ8iCCRf3X8f8<&nJK)@4q*5Kl}}yDtBsWk-q?Wt@@*V> zA~6Hxq9#ue>C0jMp)dMH+U~x^sO5@RgRRK6`R~O$A5_zX@FQzE-k;LKpppmw!yAX? 
zcllYH?u%Hx%Ar5L;_^0WN)*{iYABVbp0bT^ec5O@C)b?M$nR+n1KnuZlrc#v$*})a zB2eZ1;-mXZ?|{R=Kj>Pdrg}mqfS`ZdpSRIb|3Lnh-}8{Kx-`{=Z6)sw56VdKp~0bu zmSy{48xlET2BDSwukG)wV~fpmYFYLN2@@qso|dP+NSuU!N&Z3lKfqt}C-jqz2wL}A z%GN&CykSAK@TXUS-Yia~YkI@E8Gbqx7xBZA_50WPKQyRl7n<)ppLo57Z>Y z_UAnXCbDQOJ#eHq{75Qii`ON4$@y$5^R7voXowWWj8ki*20$@f@I{mqcS_qe@^4g= z4PAxZQC1kf6uA3^z3_NJT?-QOCD5z5C@ABQ30Ml$Ogok}yfqrTs*%~bnkryk!7!Yc zXgbtSNUk*++|3|_<#-?W-#OO4(rE$NX>pNPaQiq+x~!~h^70O-rXb1uQs&)r=$|?f z4ZS9cADwMPF~4on=lbw458?ggoXK-6uSgmQ$AKYw5Wi0BYwaA{^IIggy%KBW@+35i zhJCn;OULwSv69PuZaDMxzJVg~9E!X3dI-u1RrC(V9V{A!VSDC&Wb-(B!E`kdgVTO{ zgEJ>fUPXn(P3$=9Jx%&l7X?qGaTJ&L$1lNW`+?m_{oe$(TN=4;N@lrw#2x*(WY^X) zybZl^WmnfxI7B@;$Pe?RRQ}GUKf;MGMxOmYaX8rt?m?iu8)pNciGZczD0Y2d_cMix_(hm&luDf2+!T7r6u zu)*#4c*Jcrs%1RW^>OMi$cv0C^n@Pyp`a36lVV=CWUMxaKbE8MG{={Hsyd+uG4Q#~lP#z0q#Yma+^P_YnWzR;K-vLmPBY)GhNhPfaq>?D^eymYGeZwUV`If(DnSOdPZ_Ft7PRPH?ih4mCm}TB1F9h2>gAMqtiFi^1=oI*rI|hxS<)@5tLR|kUT>mSZ2QR zL))ULhnvhZP0*(7>t3{ln}#Ek3VAD^w&arfq#yAD9wYnY)-SV=K@r^v11*W2@}=8F zArgwtr6D9XpA5k3BJxxVP{iL zBW0!hr`&VbfpsX(&g}hDJLqF^>^G8JPbbO$%iyM;-vSh_W~fK`^aNi$ZyTH3N#Kta z4?ZbK&!-~ujq5UcE;oG;tFyT_<8+C-WP!HSmygOs?j}|9lBFp@^}}W%Yfj+{mU4?H z(%jn;)~g)FQNlEb@MT1<=;#-SK--=r<-aCKHsfJZ5Sm*Gj<>)+%f#9WLO`}8V8#fP zHuR-W)k3miTSW3M0k7NhdS+is9(NT- zNxTS^1~>c2O+i2sSH3{RMxc=FY9KP80+VAfti>Y1YWtTWA%p*AMJ%UL}Y9MS9+>lrCl4fG>^+_~BO^mO1% z3vvb2i6%RE0>`LEC2y9ELziTW;+7s;_6U@#F+CLXcG%4#rD+j21Iv z=+&`z;qk?mVxZs(F_~K6#FX64#C4sNv?`*XJ;8t%g&7=6Ashy-m?*9j zsE|@rq(z(9Q;YNWZxaXh2oY;vId3>Y#tC%KSR}uw46`_eUwOA;_gH9*mTdjFeCaIdfCgFP%G~sKqj89>o ztSJ+|_gHUk#vq;kgAVs80CeD)xtDn)dLObqXyS@xqFDF(6Z>%%v$A^cHf3F=`k?5R z3lXg{T}=B2xYz{Z)x|!=-XJ^Yga&iDB|iEjOq8ugGE**QQ+~z*o9c+ks))4i7Cldw z+mEQ2)LN@M4!!a4247flO}5JaV)myrdaCZ|Q4e zYk^Qui1al=9_+&c1)qzJXjv|i6Q+!#^Qh^%z2MU{>tN2y`Jt0$`L3_+6^2rS$e6uC zzEiTqyk>YujQ<#!g-sc=guVvfpRe>~JDxVxwI<^%Y7FCh0f4fmk)5f6My|2kmiaAh zD%jQ2mz213apNU`8~)Z`$LwiLK6Lx72wLc1Ldbhju@$<(bCoO8bEy7JFP3?3(jC6| z!!4YDdnx)38~tLp>CpdiCJLUb0hXLV6i~WvvQb%7|GG;|oRyz!PDm7`55Tl_kigfR 
zyJ#?cRwu2p=?cvvtzwBBJ%nOCjAmJkIPGx>fUkKT!c2lD?IK3seK8Y=9F9cA_YQ;0 zWsR?|q<;O;O$G8$ncr5bKJ}wr*p-n)N^Uy%^Fm153Sw;sWqLL_gzVhJmOW%@H3Bjn zkuL|zQ;3&d46|57@9hS4>;ea^ILb<`8I4tV7nE30WmjAzw%(&u-5+RmU3|gwc`DNK z*|{h^n`z}RU+F(lEno`$x-_!hYW4FFl91*u2Rv{>!R2{X_-r5)50Kk1PM2dyGPzH2 zF}t?#F}r_Z#!^iaDU__oGGxRLC!-0NpXXz62HX^r<0t(3zz=Y;I5qFw;2DJXrUTPG znY=&Y6-ffo=rIMU@FESOkW!omx?FY4kISGg&rLb?9$zPkJ@uV2J1d{PBq(9!rGUKnX(nVT4wY#hsNqMwZUXpkL}Y!stzI7r z0c)}@uVbU+5NuSWZ(L!AH)_2o%J9>1X^Ck&1xhoht?#fLHTd-GSyYX+gJKFp%a}`( zzpGPi#S|hWrcwTUjIrEN_qAK^ZDS>in-5pzO_f;CL{d`VQ>?Hm%drPM49v7HN)Vr= zYReC)wI4x%5ZpAU!o0lSL$@=H$LK%9#j2teMtDMQ^GM9)AWBGSbXz^`?W&E4$FC+E8`fsDo`P)rJ#ueDWJoveur6 zOY!c6Zj=S{g%*|itCZ5yA3PUln*&=A{tx)M5H7sh?si-%Cx4=iL&La}4@PlV;y$Db zC2Gsp1`{NHf|LflsI_7N0!D#0Lg}&HXyzxcQHq?zq1By)mB$p}L{y(b{YtU=9_j@E zk%0vO%k3h%B2JrvfFXfAg1JAu#{m8nNCF~ae=~0pS!sRut1S7c>_z|Ae8v!;rJ4{YUr5%xt{1Oe_{f%NM zPWSQWg=E{cEh_~5DPOTM98P+O;;2%-D5dFM5pJ;SP$4zqCx_{Vtj%WPO@7-g@B03E zdzrDlh!jwpR|>jvMmorW5OMK5e>99U=Z+ltbu2pKsx~c+1noc|;-j(`-loNhps99i zesw+k9823fBa|E&6d4Ty&1#TMWm-W)TPLlDqYNd=NGjuQVr%MG4eN6cpeYY*9p+p6 z3mjpVrApb_jHDK2wM@ZX#Ssjd1l684PGxTlwQxx7~w9}BZn8=dpuTzA{3$ukc@8HQ_od*!D#d^5PcI<5Jtq~O;D50Y4%tFIR3`F}yRYp+RT!7TbZ9yB&^&ANh_YPNT*O3T!g;$p zd1c8rMT{(2f+6SnP+kzmF zNpP6R^QXx5Hz7AN7R2LN6DDzwG>+za{MH|0+qzv(ka9XF#p8N1VG1xf)QK-`=?rlU zSnY{9cf=|bfjIOaL8_F<+=O36?1LWxT4*&wkSTCXD)RaQT-DVnPWfqt>O#&qTCMd{ zuqn{w^#Zx^!#ba0R5%0)hPzclZhwl?xUF*SzrJH67dl|%7bX4HIQ3=;;U98l2U20R zERPCI^7x}@_@idz_;~orgJ$4ZzzO-P>JDMeDus5wCsL?`$P+POvB<<}X1B3uDdzu8 z>tqGFel@Ji%yQgFimL19j{k-zOm(ROzQ+OIVPCaxx$9w{zFvB{8@^y0u~s4#RgKYL z>C}i!XpZ5)E~-K(x0Ig|RTuE+6(&MRYfkMkRP;9N;dBw?B8=&h)vPL@Rn%UCuT?Bq zW~x!#00(L=w$b3tZ?2EY2nURf9@T=^U$I-zv&Nq z;hSEB9Jd_R(uRHtlk1=(ihcag|M`93PwZ2%0cMl=P@XtXYvTv2BpXc1S}f#75PCD1 z4V^Fhr@R#!*d3y1yRD5RphA2x#1(8tw~WvHMLuL0mvi@*Sv6QFmzlrMhWj*kSUU$t zxfF@A6%?32u@!`I7DaTF%30zz_bYjhoUk-y_*tuW+c+kdfMI12HEi+}8H5_~>GVbs z^)~Kpgu{N+?PB}(>p4ox$g3hA5(&9iv8sLOP(yHd1-UPZ4lHLX-F$MP(#2&%XYuOD 
z-RC*$3*>(Nact=id&7_3h-9{tLeC9k8!U%NerGI?;&#=;4QRtj&&U&&@nDU2-Y}L5 z$LQ#_;%I2mQv?&j-Db~HE3A3cdJ8H*Iigc57IV+A7R=?H8!8BUjzA%_-s~TS)!E!( zv*rlk&dM1?GfA=c|JlkgzpESPav!qH6yCL)AomosvNQ;`9pvb5!g0uD^Rt*i?dou& z&ovgKtR%$j9{^Oo^J;6;Z(0nncBslg%le0i?aweAkz-=o1ToU13B;J$zQw>FzZmse z-u?kd{++1&3z#wUCuZeqrekIN=kIdxuw2VB^DbvD_g8D3uV;$LJxPhL5eIsAd0#3k z-TJ>zR6u}v0hTl%CyULk$6JVeMP?HikZ?xF)yF>V#70ATedwvn8&&5ik%vH;f=qA8 z=8=sIsgl2e=a)7Q1jobv{4TeM%_xeGd^lYOp|(w&67#77XSS7Bpn}qn%E~9Cq?B;% zERo78IkuQg0HDO4J!mGDmj*xLa35ok%b$J+qLNNy#nKBP(>xbq)Uz-CKC3ie#ylmV zWhtgtmgnlpB@9lhKSmyTxEV%y&6=)Qck}XV4EWR?_cJ@>Xeak){^@Te|Yw`~R`- zg6gMFj6L$+)(Ma+NIG9rw`xPDymhvcrm##76jp%g4ZLbp_sr01CUytTzHRU;`aAr8 zAm%TSdE^1+#fPwe5;Mzs=^|Co(r%cZvvj3;3jDAwWEGG2@1y`OnWazyX2$m~VEhES z2x+ny)ygQ{5?Dp=uu_&#G3cP52z^Aoa#`simTji9=7B)ig6T?V-)5$pLtNEq7#PFUqZr|NN|>UrE}s=D74 z6ydj~9k~=H(vUNVC~ib?hlscj9AygM@+*?QXF?hIY3mU9)b_j1X5iA{JsY7)d$bfR zI&^CE=T?T<(t|0)cLP$Dxq_82(FIaTXb0CC`SJhA4S;7>76V`5#Y=2w7A1<@F(Sr` zBbTHohru`)TzpvJV6&<|&+z}Wx`aEwKA*WVwQ{zn*8oYPEsmKDh#ABQlYB;ERPMBH z$V7la1URim<*zva^if<|EbWSpdI47zIa_x#wqppiih~6#l#Ii6GGQrNS|lcaBkpZt z@&M({D!*7=!~^>km7C}*O0sQgs#jtt17IGJW&X^aW=qIqwaOTA^H3Ao{@JnrIO2b=&zb;bab|met|%_1 zM4N&dCRw*^I8CgWSm|Noq;VNEtQjK&iluwYs=KtK?bdh>DUVskzE6b~I($w|iS4lk1_~Kx$VrW9xa4PHZE#~8b^wb<3gd7vH z#VasS!XBxwgJ7?ZGvSC7g;>4sknk$&l$w-l@JOrgScjU}bHGujpxsLxom&T1dRc3J zxpED=Allnr5g>w%s_4AB7`IkyyG77^df>BgAT!i}D-2}m+=-Je z2J#G`#+)p!Yn^|i>xNCX?oX;fzaDWj~xwr3INV?FkTgH zrm~7{CxAs721nDd4}LBHguf^jPSZEWiliQ5geo043iKO6GI+K9-8@eH{`{Xrny|Q! 
z55mv~o$w9Ck)9DM8ImFms{)n;Vyv?oBpGuf)r;$8Qsf%2ai@!#v6HE;|3paA&Y z+yMfpa)mCwUDG+$qF+_0z)&W`kNlA%hzTpH4n}SmD?J#Lt@XaU+VCUH1e$hybEIq> zGLF0lru@H6>xoSpbvNCXZZ3{gnE?+b^ZW3Tnz#^5g50juZLU0bxXbt~3HX05F8=tx z_hrB`8kTf=9Sf>s8sAjcaxrmGBkZ@gm6m%oMlLz#9JHhwyj-$Yre)Q?rP(*97t(-G zS?@Qya`O}_cRG_YdwMwzoE*3?Rb(>WSvIK``D44pl^FCFk&HpUh_hU79TlLzpUk5VBlF^!r}wtpCa zE#w84o^+tza|n_Zz!Hv-N5=p94tItz$U$9QA|G8xNC0_FMf6hBVWowwdH6)Ud3F;z@Dj`ovP@#b4*uaBIOGV<9qty!s+UoBw5{ z{^HBBuzA}EhMzJ~pn-xq6jOdqln1XHQLC_WAUGJ6IEm>f#aA)z<|_%AF^Hp=C&l|y z?BPs(iA!%{5Eipd-ebttzxmx^ZXA_ZLmZ|_WyY*FX`q$FGTG>eikLxoBqs=ng&C&- zvwdwPuQ-sBE;>!>Erhh$Rmkvc*xB-pMzYG7If6!y{sy#wDpUA3)TOVRDLzGRVvQDX zwgXhsyESbq_xk!XfW0^SZjJ>$49wcQol1EjRHSLQJsgU0GW}ny0tK*2ZyK=i0X zddYM(XyN&$nD6v_77;r8Wn0#*vT()CjZ9ze7%j20pUWGac$~>_!n8gjqmZAt+`2w^ z(5!MEGjb3;VKn|_GvqV(oGH~ZlkSA6E3=_uaap-F*LT2$bR{Kz)_V(0_sq#Hl6&ND zyiQXCz-({Fr(BXmM5vN->rjV~Qmao3hN9K`VBpMdqhBXreQnn{+ zib3aH_?h43BhYg4glz2Ok8<+iiPz|TAeWU&8yudTad;_ZUZ}T`UPLM7$Jk;X=h4(H zA{rR5m^9PNA-m=ZKCu-sH9;p%=s@R@Gm2Ou$uRr2uaL=Mc|l~_A68lCzs%N_M{$zd z`zd~p(K&H zC%YH|(BZ$~M`Xs!X`BD$8WeC={eM@ta{8WIC}#7b)pnhu!w0{yJ37%o1edQ;rp-rP z?P~zJ<_-8*I@JjZAZZV*H13OKBy0C6yONgt$=GMF&ouD zMOqk*DWEwg9tUJHoHqigN z#dFD$p0<0A&l9}O4TfH*0@lsKS*p7!y$Xp=XX8gjc62Yz-$E}!<&C!mDq$;rlF#wHNpsA_2>Wvgj4z~cMDH{1Tt6{Yy3r|{+ zwkxD+r?`v`;;>z}2*@m@1c64PvvH6`mg2wW*{e)o+<481nx56d!Dy7mJ6i?kk~ZIM zNHnCem!2_}j#&wylAxL5L3o`xt&}Wjxv1_ruyUc@#5h76#S6>K!3I`Q-I71Y-!4Sw6GL?up6`QkOSxIrH8m5~R; z@;;yt14Fw2wK$3H8JAGb7f4>{Poxans;(kI{ouEH_cj;-ed1+C?3SwE$Iwmb>A^!8 zkxj{ulip1X(@7Q!_?Hd0s4Ka+Q?5faVu-8si2e@PSHhbfWLwqIt11lc@!ex~#2o4C>-LT7;&D1vux%w~$1Byf zHK}H&<3-i5N`o%tN>aI3T7kHATUgCZ=IggW zUAoV3WvGbMI*M4`037J$r<&LcDjxPr?7sK&zZjA7aqEAvDscOrHP?scUheS^>ga$<-zX5ep+k_04gwC3VE#(KP zts%-Fq>V7j*c;NNDtW3h=M-4{<|X9&W5uUOYDo?}49TJ}{i}??c)l9%8zbZB<#Yd3 z0?YIB>9}S()y5siNzbwjf{Wz2hVSIysNzQ^Q~#hz1zXs`Ir*ICZc(mYpZ zXapxx!GO`aUee&U1)!vFm_3kkKKY5^5-QxEzR?m1f=q`%7Go?{L$8RJppZWF(gK`n zt*o(!GbxLsTk^}D&KcEqvbY9O8^$%XrC`uv`yGM9%{J3R+m7K+t6CYQe!@&W`OTJ= 
zeoLJJD~E`lc5*Ege%R)*v+AprNWGSx1~c(<@I%nY#|;|4XW56YM>2<-8^NxNeSDEt z6JqUe#IadYHE%|ax|3XWsDKC{34L4}v4*TsNETrKEJr~(J@>;i`43+yQUL=Wvoq+{ zUy%|BwnpYgMkDwES|I~0IA>~zXdySQON?RAWM{TK&U41)D^aZV%vwTS7R;>z0#&14~HMkN64 zHJIRFCE2)5b7_)B#{I#r1S`2F9=d06;J9%nB?Gra;A#5@UsV~#<+PYCgI=P3ii^lr z|Ep2~1x^9n&&N`xM-v9(=!>c2b#%J*747(Yu~q074pm?+hWLJhe!|b5A@`9Ti_DD_ z$X)gAf~eX5FN7BcmWrP7X1rsruE0+YD?4ZXHuVzae1%#C5*n50aTe`ABedxn;kyob zi(~YcJ50_@cciqu)OnoM-l$?-Sc-1d|CEZvJ@nHekA?uj)}(7z<&m}hL(~Eu66rbf zASI*V4g|9L$CS}}zcqWt$tSopBr>)1dj#;YVcdkFvH56gEi7^>ccg@m3gmjK>L>2QmjfT7v*gUS+F&}vgKN7&@Ifj%e z(#AoLc@Sqy!#?x}hsfkdKCpo>S-Le3RT=u`uKY6~CV+uF^yiG6tKXtO%y!iL+R}P! z5N-k1k5Xn{7p%^6Erv zd04&sra)TrPaZAWXZdn7qd+0tU+lt@rHIhZ;4@RYGQYlUmjsKmtzX-~+B_oE{4G5i zAzqhds0tCM@_jG-NUsbNL@|IcnHn5aUh0wxazTs*g2@+2!b|N;z7s7chxrzsiH4n6 zVznTv03OX4W(X^D8#w?b=ck$MHd}1AxGT5n%W%9_u&>*bOyYrsJ;cW-UxU*u%+t(8 zFCl_MdBH!R`7*XIyPFv%^?F9*@wH(c<=`|K-7-e8sJm?3u^c;98WssDCBX`ob2XXA zi1E+gXA!;a(Xe%Xy1q4FU`c%A#w5({xAwze(vNnWj73m{zRp0sj;c5uMO95QG3z}E z)n;OQaRKh7R=9c|44<k*`;Gz*E!_grHT2LaE!`m9 zF+-PhH!9s7@8-I%_j>KKMz#?B8B{t#zKq|2Rpqe6U~DQN4g6?b&nS*n@Xv2Q35C zUrBx1$Jy|><32BUBN7s6djmPYm}xuV@wgePzPz9RSb5YWP!V6cTz9V*w`CI~qZz@k zNfc_y9j$jv;Iey#zifN+Y}xh>mHexR?-02}Hwm>^t7I<=*qwX0Bli8>@%9QwvN+Eb)-281( z$o2I)&R|9z9>IY(5-s)hvo7jOJpX(SnTJmtotflH@Rd~Mp3zSu-M#a}?Ylcl@Vq5& zVPp^o-ebw)kA&2zN;ujZ48r_=WhgB^qmszeNL+cM3sl4gA!ouP%e9bm>olj=xAyuB z9cW?Vvx=8^bD{SD(5?5nlqi3NI5X8rU5y1uJd=;sCH$R_&3(z%jglXHkyKTEx`iJtCfFOYAx_^1jnK?ow#%3)~!aL zbIFcUVbM|@(yV=bwe3u^W#4Hzn$=K+Iw7LXRPL`e_M}=LDXke}lCGhEQP^j$$dGqZ zGrNkFnomWJ4~+&EW5SQv|M)6E<_u)}d{K7}sRRo%3` zt3pvSK;@HT;osn2E94UbxLGrx0~Kp96Z&mw*>wKmYvdvPp+ht&`r?mqV7 zB8TS%KqurwjRwGzWN$+Tg!jg&>%}@6Y0)N+dX^5EuuxUCEK10UCxynG?SqL zOUZV^B7bC*I==cd)|FKjlqHPi+%!#Nb|*5^vZ{Dnf96?ozsA&jb*<)cdt`L!wMgDO zTSEqyDDU#UTh6NYz&$>$Tj`;efY%ODgHP(pjTV1%7@p;Gtc=4l;Iap0onf${b}Z%L z?r8&2SlXx7c3gwj(2*rqP)5u3lPT}Z=Psq5=v518qX{HA=4T|XNBd(*Lc25z0z3So zwXTE-N-a-$c-T8OUj2srYCqvz9p>TsUCVQ{5z$g{eLSw>F>C<28n6eL7{WQ2FsACZ 
zy2q~!73cjgm#ZmQhnMZCoQ`XP3ZIXkAw+MHWXb?CA6D8`0Tu6Q{}MKKu!Y5Wicsg; zDg%5p1-SHF9QA({tc3E(LF%~cztD-pVkRE@ElY;$)YMN3OJqJ~tP$?sJ!Rd8Aa{xV z2I_<4#PZ{IBTih(!~~4M!Vygmf`3@5OX`}BJf{U{1)xa8F5X^k#IOeG?zGWQ@h5wL ziqZ2YRZ9MhU9l5EhkxMD44QpHZVPj8r8Gq;o3`F+2~|v?|1)Qi^>L!J$T~=B858ya zyDb)bREv$AbI9u9nFuw~KFb_(zR^*OHU>KFcb%HBto*ht0pWrRk{!18$xDl1#s|3Rj_7DLY8HU>*70M0fwYzUX0_GHls5!v; znXv!o%JI)X_iR7~R6_GsNa)6!umDp!+s@r%6gZ-y{ba0Z3xJENvLYGj>oM7C`6e1Y!b?VsJU6WwxWX?&m^ajUmX>;2n3Zhs-zbJ;6^k3%BB zj0D!*B@bO2noeAW9xV3=WVD4}EV-gCdmJNk*Og25by%Ahu>|O=!ukiPkEZ$?lDKv~ z2K(!u1LgftxAn3ffc!O_i#p-TUv6+cv;{=fON0#TvkWDfR*=DKqY!sNaZEc!}lt; zmP)=rrcen4z#eKk77hWF7t`{p`R>GDk=9*sbRWjr>&0G3i>^ueJ*@S^7*l2Zhcz0L zj&tbe(e^`HqT3JCY$sc@^tY`J9ZUm{R>$$W%8?#*1*n~DlP9CE0+hwu&R%|Gk-q z0aCYJli7xRmbnYTb;wjWY-koFH`#UmU{B9HFw3RZlV@FE#n)jJ{clL_AC8g|?)|b? zA@_$SF|HT;$CWg;-1>vA46Z9UTQ4|6{kmk>%NjjhH+JBL5P;KwM1C;c-2cIYp9A}b zGJ$u{VC#X5p?$q4Qc}U~&*1>#cpODcY_jnR!Ka}Bsh;X%uU;p_Yh$&Cj8N;;j%4On z-U47JFxQz#o)>Upntw*jfGf6O;2$a|s|MYY+y*7wDZ3@>KU1 zN3#&V-mK^Wu*$S#@s~G>v*Ne6i#wP5dqkJldz-F1$qlZMO%0E$dji3FTiWC5I-?=8 zkrilphOAzP&Cj^4%-a=MLGNNIEHG@|i27Ef7c zm6zw?)T9}Bq`^Dr_Q(2)*gfcVq(`gS;zvP%isA z93VR$%Smk+zc%yfRKBlZtf;)kcEl$9?XpFzs8Lm0_bMAU&;I;h%NzbPHN6*J(i zO|5va&-k;JDjXSWET?1Lf>e`3?K3;o5kn|dbS8m4KPO_A3&o4%?QRlfp=Ey6!~}y7 z{t+$64!DeWd=Yq?`z26q{CQ0yMyVn}Y^$6waN%NLwwMb~tk`|~7+H|pdV zcgpXUnS^I9`~+{5R&fKt&h=SbDPwtK@`Y=>05WWZK_7HvBnO)QA*XiX zq$!0ZFr+?WI0pk)PLu=giDW)9qMyQir)YeGrj)x{C<{0|w>B(q#nIPZDPs~>AFS#X z8X~tbX@5sUh>Cr85!PHlA$EL8A(z}UVXcI8=WDC`;r^CDK zmTzBaw;Btkdz#`b-LFVC-(9V{-p!jG-EWf|ZLF(vnXLgTi$jX=y|2Xdi*9K9lr!k% zOZG@!^{3A@GQ4O$1DEVeF4f5=zwth2NDGd^`nDQI&VdN+5HLiX7ADy(SNFPJ zs)xJoe%h)5Ra#)nl;0w+YkB!av$O#oi|#H1!n!v->TbQh?XJEWN||C}fhZ zDS1+LY=sk8-0%33rEiViPT}tg0BSDfB+1DOOvM51TFp%j2Vq!~zE&?m&rjePJ99ku>-?95p-)^E|5_EWtceqw$d#kiEQwPVnuz>cjqRJv{EIf0iY0 zm0*kO@YubIZ&C$TJVsJVDCsxsxQ@%07)BYn;&5_7M*Z_K@#dNa)NlQBnY>Le!mDnS z?QLoQf7{_SgV6=|c?4ShVW*8YaCUFMJSxNv@G8FKSywtw^YdTh+_m92B^HSjCeWR9 
z|5@au*Ixf2YYQ-o*u)hBiFGstlYpv@g&!P>vaV}`K{zgIJ>5H?0(f$OZ-N)R5Pu?$ znIBYx&ss~m>XNd0q5XId#AUB&1o!=JQ^y>sCOiP2Ts?++M8F6lVT&Wk(G<0Pt3y_y(5umP*b zo3$}i3cn5XuYY$7jm(&)p%B~*4xI9PaZra=CI3R70SdBtwufsrs zJ(c9Y#OI`kF7ONfwC3E=g^UfXn(RD#@@?1soxlE4b<#OZ7RBn52n9c-ha|2c#(5p0 z^Y9y`olwMKJ=>tm%r!b)=NFFlAOC)lLsoBt`1NSGr^rbN zpfNq(TIIm3srUhmK{M@;$-1YmTR}}Xh#F~W97mDY+B1=uG%WSz>dApRKo;IH4$ecX z3|-0`3SaXz)D{J!tV^wB6+I@g9<@YRcLrhmdvmibAp(TbvBSRBPuX0LZjf2s%}D6Q z+XDU9>_==m{p!B1PGU>X===(jVQXSIslKT_$MJ$P{{Ln3`joo(J?iBe7I@?RO6|Ol zPPI=-$%I_iGR=Kjb`Hv_`Ew;0wR;!}a|@}pF71W|%0`h5c%fvJVxJ#7C8=pZo#n?* zuH0~7*o_bepPqbseHQ8D*-*iHl=kqne5SUqX~RTyy<(Q1TA`otWbsVqbf@vJ4nwYX z8=j86pPj!ekBkBD*<0>yX*0iH|G)(GSj5$5%^a1+bLBu)7hMF64vK-0GXp~G>%`a4au_Al-!kOO z75{@u_=XAtwCuz!b0Ip$Eb;KZQ`0kPIJQ00%Y-5W$;-mk!hSg+GqV6SIMj z;s<2JUdXzY;f*uR*0NZJRwj}>V7M#M*)M*ERz`f=aW!1A+eK?_cCejeyy@rwEM*k z4S66NM4)|Zsd`aFPs%&QXI{u8s|X}rs;jC#0|r|MMg_Wjj6=1*=1eaX4z-ctKf|x_ z)%z^ua1)6Re&Z$MBTyp>>+JdFYfCr@aszMNn09n^*wOn$A)liMVtX{IQo!~fR*?uv zjI$XtR=9oFtZJ$Ertw!Q6et{#>VTk2HCGysbAzM-)CR$_JP>$p)5Z%L7 z`Ht4D7nRh`ZB4yak#ueiyTQY1iVAiYWQ|1CYu+>D-Zwjc_2Pu4f9fd;zw4sRj3Q>Q z<%y~XSPb*LTuO2ZEiUmPd~5ZMuI)vG$nzf{>5`ij*^)LFhB7-v5W~Sx`v7b)f+Rt?ivTNVwKBKsaQ809etCnOtH#@Q z@M=EeXqU*lv-adZAVED6dVmVlx_DzI@-HuqGDWZ6?e2U-TWZ^rW2F>&Vw5-B8Js%; z*`PJ}@#aW4{PPtyH?2p$ z@i_j=EtN|WC8QN9yUPO4}rn$3z68?ZaCYDcvs>buTl?wt#*I3%w+fFPzfNo-`=tQIa5eh8VQQ%n$V zsY>6DATPgVJZJG-W?8wU^%trlIY%Hx=7&fzoEQjI`IJ2J%9bxTe1MV2BdQCiRt?7X zSWzOa1NK}X`LKBxh5;HskK?5mWMvVHV%B71%cttqBod-PBjDU(|C63TOcgby%H3cm zF`oeRRsOy90CAeY`^mMhuDeV8`0|~9o@f4?4W7KQIQ&C`&Uv@FcgyoW)AgEV{%F)J z`O+%!`PG>)h{6je@r%KK?mYk8dnC$!WeUzN`P#7})k!?B?!qeVLyGTU*0J@KGYg8z zd@R;ykKX>Ww%{GmY<{q#<$)rGHuxv_`c?Jj12A>4nVE}KK27^zu3N1Y%UFg^fD))h2+j6ouFtUumM8(n)J~UmU^4 z6J;hSii?dOd_%h%syzq;eImj7hIb6a9T*{8eGtH?rTGa+ZKjzIEXjgMO6%Y zo-)U+K{@05eb2yX`5L#vb1zk};wz<#^)+GfvTk7cvgf^Q{F+8 z%bvj{VyUCrpKyS?<~XL?z?yA*0-l;1KJl$Qrb6DDB35-D6ZCU zLQzrMCt77mK6UABBplV#Fjk6y#6(yZ7J*GC7C^JwUQ@x=#v^_Ax^tSY3Dv0~X9mh| 
zhK2H1Pb}FHUH%81i`83@giuKPf1~19CMlyoAaGRInIjQ)6Q$64FW(rz#CsPI~gPhpuc{rM{o?GpE8j^6j zoC%}E<1YLA{O1f;jxUCr(@|cmY){i6Z+VLPxW^Y}wW0dceMMS((=V3D1$@4?oQmp; z37rM|7Z=;Kkr5%T3UWjY1b=QPjgKBUUMq=Uhvlx@X)laLzN6_MCY2IFM_|;3g6&0w zg*YQ$zBul*9qTBx^dB!VMXB;lyy7I=7zK`Syv#E`r9tCA03Dj|bPwA2`zJQ#*f=ms zm)e$)Ju}Wiu<;I?me^?uaRJepRlBY4I3%dc!bg7*Iii^gu&aV5hnh3&+xG&d!2wly z_?kd}vXuFqrPR|DOF7k#I#0Hh73{QSa*O0~e*8kjF!{3P98a3)6vS=e zgB=TynJ5VV*+n9gyAWQ3zH>NO*N}9vrQE!GTEHLx{&n&RV2>}p#u<6%;>*R7-*Yxy z7-eYwpJnt%N60fj^8hW{Rm3Ap@WBS+atW?z%n}gE;+xOno6X{y%cM6~_^{3nE|DsJ zJJ3=2nXVzCLG#0kcElFHA7Zf7m@XxSsG!(3NI}d0_!qZEAM@C+x&HC)(l6R2YLfBD za2fwdf5WikH^Xy*t`J>s`FYuJyX1uLX)>^Ha?cY#A+~Y#D7qrOVABL(9#9 zLzYWD9ckn$F)ip-zdrK{Xi6?IN;xxWj*A9QCCC+|7OS+Hq2QEx;@(U*`%<^LQb~B- zaZj~9yk625!Wzh1_H(PYfg7>pulL=^tgnVluXO>s!5+omIVIB>Sq~>%K=pv|K%GM3 z9Hf2kL4o-#Eug$i6t{T4nbeU%spm2f208UVfEYI_9~oIj9|0A%x?4Jh?{#?Nc@dpI zxxBju!{xcWJ2$m?H!)e$T%PVH+ly^2h~;5{V3``n3pbn&qeQ*+Csqn%4&s1}1*8uB z>?19Z5jfPM)_(aXXI1dVDXv5tHDmJowUgX`O41qGPDQ}E=)*fh`v4kMR3X-cH%E|m z!Fa;+2O4RG0ri#3gC|DZ%R!M}L9F7ySr-lfAtXHEEu5Q z_ddAnn@6Py45J}5aE)Bblw53>oGD~zcBLi`{9xFAE;-0r)tLNYX#KUi1#xGi<|&Zs zzj$E(8u$;t$t-A((fVEJkSUuLY6!MfgCusfejl3LQ}iyjvs{%5Bkj^(w|D;4W4Z>uc_}f3obql4 zD+)XU!ZJmqbzy$(*xX+eI?Z+V@_JlxibNEw0S^Lp zq#!;2>Hq27{KK!a0Q|ZJ69ZmI*5pdJGTfoLx%COw(o#ssi0zhVXpeXk#@DtZ0{zd4 zBU5$RItKv^Ai(K19{ay5>{XV;kIV6OLdHn7tH1A&al8q&l%ElQd}7)}Vwy@$Z|XmZ zkv5A|+J}Zs7l8q?3+YI1}kd!~iAKj(uor+pb~q@Fj7Melzcl$VqBf~&7Qhec6Td!%8=5;GStuz4nR$m{cn2BGw65pa$=KP*yX;65I34Hu?2fC`{=V zv?vz{>#NKm0!5jYrBePSPcWs$c#C8-2zO<^Y-thh>%vNlm)pyz#bu@v%A>0HM0WKb zNWGRWpPt+5M0kE`lQyyE-!7JOYpjo_%&*D9-weD_(XL15!-fN$8=1{}wt~49q$_gq zjlE!?GKVdh6uKL z&vN8n(rNH^1s13CWkV9*H8N|m15v_ABhtlS{R_vf$c`EGMnUZXiElF=5s;CKBc6g+CANFcs|*TBFU`2%AA1B;k}fU6MInL8R8 zzBkD9evl1GALINSwbOg#uXP`4%6LU`GPS%gdzwE(7=c<@d-g;kQ){g7In$&WjU|nc+9@*gxQIiyy~7I0Pq{_Vu^&D#>z$hFj)iP!55zYW zd)o__Tsw`wK3q6yBf`9h`K_ar5czF9XYx-KmeeeB%at!CuyIUW5z~M)5sKrkqK`Rc z+!={!^ePJNbqR`qDKj?=<5aF)8@ZLYw4D828mne)BMmh1N1l;NXOcT;yu9j}-~jnR 
zrcIe~Pu5+se8KA))F37ug`VsxYNz4;Y=0}E_?!ea1U@G*#($&)HjnA35e+gzC=G_W zwZ8GR`Qd6LCM&=s9HVw|&jqo35;XUqe2&HUsn1JsOs-j+h&!b_$klevfL zqM7VVM(ygD_*9B`RSG9nmkK0Qy>G@ceJ^jT>yOYfll$ zNRC-vDQNLoy#fX3rbWwDl-QKOL*GS=iet8U(C< z0twkP-VDqqF4=Y$xHDIKO})zvRA+(71%r2_H?^f59b)9s-iHhu6qhgfH0cUT**9&h zO4i!z87;|O_C80HH19DoEd=Hrvzi+R7A(|IeVb|ysyPq~F4x!bJJ8pUtRsPSb$vaD z?@XupE+(c$PVqcEUk>S^aT^>nmpIpcr+m-yInlsxO7g4W&djPdpFKt1pnfpdMN9At zT<@S4Fi!^GHQBb#-c4KZ#O)@C%l^3xtdVFOvETzp#(RE361fe6|I%I?A_`iy1^+TH z2&~hyrn}Q_4U@y{ah)%KbK*CqAl!PwoSU=G0+Hd43IOmdn0H^_-(iB>GN4AvBdu2} zON~`s{o`1AeLA8>My&o22qH(NZ3f1M2|XHg7qBC|u|eavtL>K`7mO&aM<85CfFxc$ zGGO+qE13Hv9q@d7yU}_)me2sU{cd?NE9U0CfxNw(JD$hpCsla6F2alt- zF+;Hs@ilV6?gKTQXTp-nF4d_GsYR6t22)Hy$wR_Ib15b&b(38`K6Z|vSrU+U6CIP) zQ_8$U5x@NGf)jS9Px4uVIUthcn0D=?(~SF#{-+?c?WRm~pSQEUXv(A<)mf7R5ThQ> zF)+DZ=RT(td{~MuPl(WzzFxlKQQH@So1K06=Hp*;KLj=(TvQX-n2K&+9%w4$7JV^b z;sPSMZt72UcjRBK_YBD>O1~1(askr9dFegQgPL@+Ksqs|4c631V*vPufnQeFW5q&3 z%1K#1&)`A*U=^@2wca^(V+oXb|Q!3z9^f zX^xs}j8<$O*P8P3O}}Bc1>PQ7yajw4yqX7#HS&)$~v~Unekq(1Wm4yH?aZNrz zyt5`;6R6`~0Z@7Zck1sDqt>MXc{K)Fs&_#A?eheW4sNQ>)8Y?EP14x;dLwzdNv95f zPD@8g0W_gnC_dld)Os8fx_I4Fgo=BS&YsY({$_E{**+xzKLFkub-#m)vpfs3x3qm;f6x28 z>94|I!@iT1&~J`sc|MivyW=Xy?QbuwvtQQ$ZCeSIUn|o9@fOpsEMI8o2bXkcG~iMO zJqQ4yxua;G%lG*{Tzj%TueCiuoB&{+FQ-y>!78=*G)x^_hjoAd)vo?`6(MLJx!YK{ zEDoq>I{+P_G?<)nU0t>1hbl|CfGG3CG6v#v+CTZnWHzoX6VN2=@QFmpav#aX?u9|L zbBjM3LlxRihZVk`imig8v4f(csO`@$-1Ycfll;9nC|n3A*jaX|$ZsbIX4l_+@_+8E z`LuwpVgRi-2mqdW`$nf^+L()FbE?RgbNmwbfiL456CHkN9T7a)%KgbCs@;Gi-TTi@ zK0>jJ#y`H{sCU+bB6gEdLkbso&)D+rnB@vCICEoi_jJiw-0+3;vKw2A0@7&F>7FL5 zeXywg?!!eFTP#Ph_|PHB0K}RM z%gvHpLiGWTMR6jEQIo*)L=M!%M4w6$91@`ZQM`y93&-*ALv9&EZRvPMsbd=;Q~Y~s z`}qQD44?41Zn06h^XOlIkaOlFJ8Pz{ob63u(N!Jrwa3v--I=MiFrUEW<G+}29-=)13fYCn?rS7c8RfsF1bn6bYe~knEDv&f~F2EP<3JRW_Y-!*Kn!nM5J~P)+ERQCUklY zVB2LJle~)*BkRk70SG|O%H;`0LEEM1F2xje`EW{rtd@A>P5J|&dd4*wDa?C5=l0YL z;IHzXolf&(`+eKM0@?lJ zOeRPD`BAwGEIFmW26DMKgtF@x0XY-Jf`}U;01T=CJh_(QPou^w>Mid-t{g@WxR$RG 
zYAZ?o*#{!6?`Tj%X@lXt!00k3y0CT8)p_hN5jtwLfCuu6_t>#?oC+qT<6==5#QDBl zY>87JqLTWJZ<6kI?~Z6>#3Bf|Pz!MVr*Yb*k)WFBVX9AIi>SrX*a6361-iwvKXKfh zd3O0}4_;+y@pcZ^zG9>bL0z?ooFQz{d*}b3t ztnQcB$o-S|V^(hAbHy`wL0O-fzZKoK1*VFs`;$#N^w(g8!sTDAI6y$OzQj%f9l<+- z#O+vGf~QzxLcywz-=KtuRDV0~gtFeR=AzzN=I>7FSim|UE7rvH0maB~M6>y|Gmb!R z)Bxr=FyEs;&j}}E8ou=L+U(VT+EtH>Pl=EkZ zT!8zh?TR4%iXRuy{)`sXbdlqGhW;n&^T^%Z|H(c;F-C^3nj_m7fY6#?o^^KF(g$D{ z0IEl{S{ATTfhfIZL?!E+2dN~!vvbr9?D>!X^&Q#|fLr*_8v4#!1@##RMXyvYJ)z`L z#166{k3g%9Kr%JOnzag;lru?<;QTvjUR3%<`U`tXs%6#`W2TXf(Fq~5g=LwaborZh zBT1U=)!ZK##EzT_A%^?BtbolMHFWkGU;`RrUpWD{h9X;gWiW6qhQfX0?%pHBV%+)# zu_LUTGb-mx01`eZLa_#@SZz3~R9o5ofZXqx=i0{L97cK3W zHbEtgcr{jw9sVK});Aw#D(~be78;19ztz38ys_Awy%xf(uFH9CDH6(Em-(mNbi>$M zM$eh5=-B(M^f0l;mW?!IScdT9uC%lpegB2c+gXP4>~mkY(MZzpQO(5cyB}@Y>oZ1C zg6G=xN}@P5`QLp?Nxp~F1ucZc%OAkQO_agA{TG~DeFvP!AqQ?4+tDzO$mjusuHLH) zqls?SB+-4X!QOhZSYTStBNHC&CYqYTwT-#JbBB}8N#(H41hStmOcMM-k{2mR;@G1f zj=sMSy*Wtr4}mq0r*Wp&Jd-*Qrlz%#!1lKhq^vV=W=7NV7B>BS_t9$C85y~UY7!W- zK}Gm@3(4}1V$@U+TP#rK@?Cq+JB-$)o%l$zl3h4qzRU*$4}5_DZ@hVg+D8H}?xj6! 
zuMg^LxMd#TP6X8O1z45x{s|nm+u&;7rUIKo;4E*AXJhkERR4K5Lm}+!ri9#taW}Wn zBsoR&g3muO^vhAGGALKIgY_KoY@i`h;KZ(HM|dU2wubVJiV9=lgYyh*|8MMJ)0gCCR^rb`~KZh1!XmY7ZbqLN^m%gmnE zjFpx)234v8Cn?5ratI$eq#BQqzr&QT%ci;=QrB*iJB`noR|x+R*j!0QL?={GT>QrJ z6D{vHv3Y!iO5H?z$(O*MJXzL1@65P;q)-Ea)d~LY*ZrjqD#R+@4XFga*LwiPh8mt5 z$CV2EkhkGLMqJ0>F(~F^58!5N(loo$3kPr7%b*{|18TKMCqmtmoaLx~SbQm6`?9`u zjj2CW)eIKE}h zywSN=HjmlNY+%MEaucbro&MO{_Ze2wVi}!QO>}c!)^%M|zBbR1(WRUr3d_z<$T7L) z6fA2+9UE7c>^bVmIjJ{#A+h;ddqe_PC-I^qby_bV7byEr2+4D;uGVo{Y?4sm!(2Tm zWlC<*Xg@ZGjD=CP*C)qvneMWI_zpc3$Xs5ShBp&FHkRXFd2+b{2_|7c-ALs~mjH~4 ztCls01g1y$t=8Q?T}S&K`Gt14tA(cwUOm7g$Vv4uI!^(Dxo$XygMU*B9nu&txX}YL zek^}mVFHvgk*Bfj4+O0e>)My&?U%s8^tRmtIG92ocgcshrwZZup|dYR%0FrGMJNaz zPXd5mcS64!02Y4D2h^loLh%$^qk?z=%hW3<#xLJ{dd}7|y#a`j4Negpy2qwk*Bawm zp~eve2(rF0`xb-kA_ic%PYyYN-F07yAYZZPz5fa?;r&xi(}DT<*((B77fRJ`8l=n5 z#Pu^P1Nlk>ev%v0 z3ScTu?mEXlFFo5VU>}KnWB-93y=x-BzUnnr6*myY;st!}g@)S_3LgxS8&JcUJQYxQ zeT|q*2GQn#vwB0bsI#IQF?sVWe+`@oJQ%5?)Dqz+sLjsy8qAuTP{_)c-0mc-r!Xzq zLjON@p><%j{!GL+r1!Ua6E+=jd3*0+Wm=qsS>$I4u>F zlB}p^^t@a>knW!^P(OQ#mkm~lk~qjULu6$pN?@J2QrI=@+TjV-%DtI^@N{hgld>v0 z;Rzy?{Po`l3Hc2N3b}LPrao{(pT5kuj@ER{p;am$^)TJA;_YqLAhhIkVS1+B$T;c9 z;uFzw#Q6hC`S@_)X~cMrm;$$y$N2YO$hj~(P>W%W6=F5rGAVRBo*7W6m|^o>o4Ae? zjIWKXFoNQp%mV`;I|>T?RVSavYIR6fmT;4w)xZm)i86nHaWSjEzVv=D-Ca23@|ruv z@LIU%JGU~3xFc`tuH`W3sXe34tN;sG$m3?aUI_1+&Btv$1S5jp^76&MG<&$1xxOQx zslS#k`E-ZbdU>tsal21@=`rbKl_G(en{y-ibYfdV#c6e)8Xu)ZR6!D^1S z5c)ki_OL(hC`rhKUWUVif|mOB87h22Y$8f+SYi70AD#(Brf{ zF+%$u_?MPrOU1RbT_5B)O={(iV;bH_ctZa<;>K(_@+m=i*JB>)@Y8)L2Fh@INi@ld zZ-c=46*_l?>zHx4+e!o>=RI=Hek(~1^z(4!IF`}P1-#6Q`oU_I@bq>?d(tz&sdnpy&O&aM_OR5qhTu zbJ2yh=yQ08^9u&O1&XAF9|6uGSi`Fkol%KXz0Gi`x}F8_6V<2PA&GU^Dl;y0eTiB? 
z!TTzr>5y_klb7Dh<>caPWQq(FcnDOKXh_Ybi7_fHv1o~LUUs8hMD>JJOkaAt!S%tN+zI2^Ez0 zOB*?hTd>R=xh8M3su&n8VI@=gKdEuT5pG2dmv<}NM!9Xqo}~NdVh(%(A&)GiH*;hD zX71XISr2)5w$AVNgj-Y56Fr-c89?VuBgp;HBXx~1+-3TBC=S;sOA&XZ(fF_N96dyy z8JWK~`p%UCXfOmtUzwNM^q#r!T9%k=$lDm6DC&~yI`RM928M^tWOB~|{XmGb>&o%9 z5G&4841Dw#Apv->z^HJZ>Mx@G34zVeE*C`^-;E|^R?M6D_gF(m)>ra=cY08M0v__J zlkSRL5~N1;dge)vR>)Qg=0w7l4=L(1Uj+_R3ZC=Fl5x_uFxCS>cu=%|33pEx}{B z%>%LBv{bYzx0EuXw@fliH^|&utlddRQe5u6L`j0T4uDFTGVi$r^*UO5gpN^Jdo#u~ zc)_QdbM3s%!CcgHLpn*~h?%wRMu|1eo&`_<@dqt?aMR<6V)4j+hnes^1OX$VyPL1p zxlG|Z_?%v+Sm7yOsbz3m9vQQ+aFG#sW~x$`-gHJl$j95K%NcHO1vtfTGzkBLAV1Jr zOep7G|LM@BVd_}Z{=BWTVB*&UoLvNXl_U*YS^E{+lcbDn{19Tl5UAmIk~m>4R04iT z{NTKo_9U$P+v*hY{mMN+xtjp+`+k*LaQbWhIJc}5D!E46VK;~WQv8>bLOQDwK5t}| zqpGd392xNLkbxLxUQ%P<2odI8y%P*}?FTqv&|$>|chLLM5dwr)J`sNo89)D%IoCPu zr>Xkb)k;XkswlBa7){c+8S{NMXrB+!n|JS?9cfff582>(uzMN30G}xNliBbG&&{qJ zaDD-2;ZOF{GmS zT8CPHk`Fgs_?Ong$VFET;>~g#B+vc~1@qE{RjXAb0}3p^Znc0j^W`~;sHa4oL+ZX$*li35OJ)(KT|v^s?-k5fY|y^sSv>YR z)gMunp_{0J3 zu<$D@MVgxup3vNo)Kapm558b8OZ-r1ur08lVSt&Ijd4AN_Gg|9nEi^7WJg*J#g}`k zj;X^;tEbut1Q0qso8x%te1LeGkOlKc#rFC}Q7pcAE4(tMceLo^YI8}EA8wA-CISlq zg~}iH-xZrq9>D)4Ye=D-Uw5gxNWX7*Jj?<=pes55NZANS4sXFbEU=fB^&^RZM9yhq z9pw4VLb#U&t!Z-=3d}Pt&>4M4b;;0gmdN;`;lusI7P1I1_HATGZ?t(o=L zN+TE|W77HVLI8yWQ7y^hsTe=F3y4w2%CTefoCfdNA(+q`p$N%weYQ798A8Zvs?_Zt zER7Nz;Hfv7luk?VI$?JMZ+0&s2F}h9pJ$qfKe<4xUz<{-S)^+*re=WSuY}Vijb7gM zs7*udSW7aH!zG~DimGpt1!N9?8~sF?Sap!wFu2dC_8kO32@6Jg6Jh6!d%u;D3>}`N z?d?)f?s`zFsuXFv^Diempx+oE_XSf+!k+5aMc;{$X-`B%C7R?I9+(pD-xAX`O=TNS z;}-NSM9zKJ>s>wq(g*hx99(*ORX~K9^76~x&gDjg^rg#LEF+`g4UtvHea7@kDn~}g zO*uOp3L9y!8(?^Ov;VvoeHm?Qr42eQ45Uz6kk95vsVIVuij;8O8xnF2#$;T zlS|Gw3tfND^u#GJfkI)M@tU|HWtx9-zY<{GZA3%v_;5W*&|*3rzcNj-3QnDZ&+afH zt)qUUMi%F9PhQ1!UHP3W#cGvI;*x`g6NcoKNcpM_9jDkAcU!}v8zRtgaM9V50IZ*K zpt#!f($C6*uLx2@zGB>S5?-`h8PxloJ!(27AA5Hska$1Q7VikUISl3LpkEu z!f-z8fnoJ%Ktl5tCm#beJ;0a?M9gk3TTwHB<(XX{4#Z2t@?lp(@;@9ue%zABF#4Cv zR(m`Vx8aLTyaGOMz);*{Eds5B;*KH?TtibJpB~Yyd{hNHU-hRX>Mfk?TsfqrFuF)M 
z-T6Ojv;;6swyL877>&U5mLc50K7ZmgDL=Tbh%)G_(9_V(L4eCn;bpCDT6J(+y{JAQ zU&VvME49f$P$5o$jOYXI-67h*=ny$lsFZ#HvJI*y`(;ZUx69!YLL((5oxDj;i;8AV zEP`^EZ}y8nl&vnc#@v!o*3$$>qcpxEmGGIHb+Bx~>z^?HPlXFVH=JzOem1!i0?DHH zo4K)EMMZz5MYkoh2bK)@I!u@pcV*3D+MPg9M#aHb_TO9jP(x#P7xR8!o>G>~Lxl@M z^KreP#O&%=R*k|*@q8+pn_28PvKnUAQu2SjdRZJ)4D9hViIN$Y+NhG3TS7ZC#HEb> z+HBZ7JpUx_ML{hgfJNQRkGnG^4y{>k309WyKpm0bePPQ=7Lq|6>1(xN&8hGFipDHG z2=MI5GQ?-xkB0=GijhPjea&_`C$#dkpj+~~Lt%B_7E^HD#WEJHB&N;KhK(tE!TOZT zI&OQ!+mF#RI(cw8dIoS=+l?G$4gVigXBicB)OLNkyQM?v7^E8{RJx^^0qO4UZjc%Y z>F!p_p+UM^knWJ~_wW5Y@4BD)z(>|%EzX>C?Q8GfUThTMHofg@pSiT$;YJ+m@6Ge$ zwz8zbi5<9D{rT15xG?_n zX8Zt6)l;GIOI-6RN#9IF~B#m_F58 zgsP^S)(2!wB9)n9*`j%bE`d_X#(4F9^&i#%M;`V)K0CsjLF3nVBgQKMMl{8J6RS1F zP8BOUsVzkHGHZF#i$hJUsdAwZ`m z$EGJ?kW@t91eFE=wGJnia85fqyM%k2 zuW+s^O!?|l-x&bc|F`ho{pa4YU%8Y7$r|IQJ}{GJ{N>m2pBLh4H86kvAf}dPpe(oE zv#HNxT|>2V<7EFU&s^cG_N@i^4z=_B*cg{u?IwffjzehWEz{BR8MV9L1x9DU4aE2F z620%;$&=30&RgG$1$JMdKR8DR0aPn8F3)CH?syN=n*l_QBMCe;Hz0{BZFS$spfBW=vfXAVxzl6p&p6|3hzUj0xlFLXp zs^s)l#6$6F_&yNbgNls9B9~RVYL}H|I$U{}=T?ES z$x-k&5YS+aZM|~EYP(}!ai2AI>M3FZi=y^3xx$-zobu56xJLbFDgTq`i3vDbBFtJV zK_5h;RM~@QU6|9hYNlb(`<6G$zK#`u z6IO02i;F+tM1iAST0VKuS9$e_1sm(j@w7Ux;KtSM67;=d)bvZ(7ivrOXJla51tw=! zDIjd)`^p%oD?9nT%VepD4%D(NN~&DUXbAn<=|OTu6gOfO#siyAJ3?29LJ=Gnu^mA! 
zmPalPKUoGJ@Y`1Sjf|G|d0ljlT$aEs2VT1PtRIzf7e}8>l{@17`~|qoj`KSSIeGM} zcVU9$=x6owfFIR8BpMND3Bt`(L$1n)w~Kpgl#vl=kDZ!iZf{pErxAkHbY*5~a+tZ0 z;*CLw2PB44fmS#&LiXa<+$vP0H0y^`ev#-0{Fu@Lw0;d?UWVIF5BES z{_t3DW>in*pMyx(+%aApQC39RdjD==_6p5sRDCI38o1%%(TNB_#m_z5@^}xP_t1N6 z;8HOCVkHy=#)%OkyQamzrv2s|TTDr!u)J@BOuNrN52Pb^CX$)C;EEyZd4Az8i`O(-U&`096dm&K?U>mv0|3GmSGL z|LCh2E@R(UHSI5ABtLIC7{xnCUa3?MlrjatdMyHA`SboT9-8U_RAaxGS(~5zD$4@k z;hUVzHZG%q2d*j@q6cpG(PX-5cc9DJ%YqLkh^^Z8_~q_I$^0#0PcWf}O5`79fjIt& z?`bR=|jJ>Dbq^QPDlz&oSak-tUkH^eCcq{o~A_ON&ew+a`SRhyXK``t~^w2 zID1ihc(mpjncV*;g|ChF~p}?TE@6JjsvyB6%}X(3)d~k>Asb^sWlY=ufTlp z&VLz_%{DNY%ZSF8RmKhaQ*l=tqbgz_hHoe5^xlUdf*gL!u)N_b>M^={BR8T)4%dGS z+UkqnJJ+;nKU=UPyEv;_SbJ9CU@3b87R4oL3KqLy`{#?_Lm_!#`u^srkw%#xZOPC` zC!3lYK<5NA4Ku^wDw9N9%NQN;NLsM?^FoHTYN7OK;V;?-xMaqQ{VfrN zr910T!=FR%l}&Ly$Tgy^xjZPA7hw+JG5#FpO@GA|y6zqL>K>+~Ym%hiOMaT2*dUps z;7w#ZpiQ5xJ-z~278ysBEm%0eB1Gb^y(bIHH=f}mjV5K6kmpf42{_0mgl5ewPslP{-@iECt=0(qwA zH&BIKXox8ZP-nm+6vp!8I-!;|JjOMsyaf{!dS+jCr`EMn=P)fdHOR>6zGiY%&6bKc zJE4xZ51K-X34uWmC^usK{rQ%? 
zzXQKiqx~GN-$5c;%Q$P`X4g4MjWJ}3_Tc|`e9@6RHF)_hgdAgf5L1WZ^{!X*)vezB z2u9Y|W?r;NMNU0(2~$k?D@XO~u!*@wqu`6IT?ctwoHa8MdFia)UZXJCm8Q`d20P<4 z-m5q?qP7}Rq!=%G>$32=RppBz6zHfhSuFY9rMx{qy`^m@t0w4G%8?!bE+iMM6#ic6 zt;2_{nrb1bn+BiUjsf%+`In$89g!2QC{a3+JQ$ecgci9@d4H!8ZU&km7;g<3JrN%S ziHgyJ`b5!aMX+;Q`;dx;wAe>0PQyL4;%;lT^n9l1N z7IApK{fn9Ms}PnK11junrr^^7QN!7)6Gi3CYTQ957oGPMns2S6u#sUWL4(l?n|>+_%ow=9MgNc=Jt7fSioE;NL?&$Xknb|MA|GAw~{s5{P}gwS;!Q( zGjX5>3rR%rS^hbz&!Sgov*op4uT<;dzr{~7ALc?7s-%#0P2X5sN2`ANgz~i##aUj9 zqawJzI7NCh`cX55xG=xE9d7&X)u4QotniNzbR><8{X)##)^4jPq$sWrRwZSfpe{(K z@%@MmaY>(Z38+RZ%R4H>R9?(5F?mqxZBz0$qR|98uz~i6d1Wc1Vpn@c4>seJrPl)7 zhtl2z*&a9_&nG{`io|@D*B!l2|E6eFMF{>681cggW|>cc=d$pz4ysLoCJCh01=z35 zoF5&<6P^O-Ns@m*BjaZ>KV3@2`fXaU-`!$~3uZ)L=+2gEeU-^J`9C69+9^?rZt_mA z8wu0ak%(DUKYbS@l~~J~qMX-##*(VkOig-u+gp>@Qcbr;$}I#1`z=R`JO?j16_&S{ zqMSS($E!$7BdBEEG9+Z+qrHg_?Op>?G&;~GgM;EhvW(8FP`mJX=3R-#yS&N>`Bk(6 zDMZXLwVvICq0fcWGd-UZdmyvUxrZ*$^@(9matkFT>dIER@0H}9M=4*HrU<@N=s1%o zz!u;lWJj0Md(7sW1(VR3eSn2maXwDqrljYh*Oq*-=KQGmbw`#p(Z$`pM83KB&l8u1 zhZ1q&vgf*2)R`pO(uj@GZ!OJ=K*Ph3q;x4`V>n$|@^TEW{Pr_S^1~^xT6(u;YW%O^ zh$NSL{X&`tjNQ(K;EU6r;k{ZeiGq8wMB)pt`rWJ6ABH*3)-H3-9epFdL)>4oQ~nQoi+! 
zX^1B$r^DEP_z3OU!T0jsB5inp1bN@()i%aW=^6)hPu(|3J$!Gw)dR1{*nOBUag#Wt z!G7cvnf>ZmE4~cjxB5{54MuUv*2Za28?Yf_GviZrwlv_HWlNivGSWcdgEtNCVZV&_ zKKW4WO0Qj!)Zto6>re^a)t1u!6y4g2p*at*Vd!z)*vTst=p0Ie!>6=uwkC#b@~kW6 z%+wgmr{A5L%l%vF#lzg{KbOI`FoT*z;13Jp1zpTi(i=p!p1{V8C&DU?K$8RSJCS_;h zd8CB^lg%p@(nY>a2zWf+e~^K%*g=Rgo+LNcf00#;}rstkBorQll(6ByIKh9Toz>uMxp+DUT*U#I&7NON6>+&zbBl0d}b z4Jx;kX}Oc_M?!$9Zvmmr5E(4>;T#A%^Nz0CBfGx zOJOwEzERjBKf4N*jOk2{J;+*t&|A%N%p^qs?9uR{_VBRab}NSKd{LmyX?>9Nb}NKW zr-PZYp8%_rS!f>kq2Q(X;)8$v&PI{OorWYX0D-Mo zEP}YrbPE;vY<1-0lsVAa3MYW=0Q=qTJ%fF{GGPcIIstJd)`vz32_pASXCD zV=Acy3XHH2{Lnsi1EK31Iy+qC{w^0Za6QwA5s29ik7O5!l9 zn!aIVI6z3yBF-%i#tuylOKh-f?PuN7c|eMEL(0Cgw$xdUb2BSt9(v~{%V}K^t(uV& z{%9*tm)#3-06p=-n*_rs$(RT~0OY5Ltk~hB2Bg@Fx0^}X%DW$~BG9QdD7+S(n0J~I zHWLYGMwf?7j`2X8LkjQIa74J)9oJpFPWCqif#KVQ9{A3DHbm zEnw`D|H{ZR82WkYTs@32o4kGrgLf{i>$7T?)ntzyAIuaei&XE){{%BDBAPPi^{Jvf z({pnMWUjpA^tvY?`+c}xYrVjyqz+6{%dWL`7nE|uhXqulN-6u(Jtsl5EyV4ePIv?j z zVc5fD-NggEj#&lT+n(E-e$u}5dgjO4pGAlGwdFe8@RKc7U@|6nVokr72raFxCR_bU z*pIs(ZpAPJd8Dkd6Fmy!hl-Z6?YtL{HPiUkHTB95#BWU&&gm^u7=BI=i{k^3Mt-Zb zbSosRMpLV-gx4jkMI(y7xq3+oJU6ULZ2X+@?>|f+pScKSmW7ciEO|<$?KK(uk7(US zI8zo&AXUtsr5UrJw4R>TgA`ID354*_6H$WL;Q!5$ep|da?@R#XTR2j*SQo88b!HgT z-n+D5gCD$T!YoRtHBLwEU&4jaVuUI?{69zvpoP^)uC2+gf3KLUi2_q_OxDL$69@Kn zt&bOcEVmlDKQQHjqi#WTC!c6ywIp_%C?j87*B;KRRyG>X2O6+B4i+VArdhx(wWL_t z1aVhpr4?-9LzNING+R~P(mbts>AX@oJ1uw*jO0umZ9^m&)?}Jjw(Fm|Kd83W zVOYyz$LhUF!A@QA`Cc_7N}<#_E`VeO#A*#GbFyt;tlUy{q7k)kyXcB?;4 z(B`#dFVzTxGp)kD5*9fCjMR$u%!<^^i|SUce@{s<;?P-}sP*FDW61 zD;X*fZD_P76y+B)VB~AZAv_d-2h;et^X@87=#+D;$jqNFOpe@-3&RqUc7}D4l#BvC z(YYLmE}XkkdGjA-D2a<4lD2tNU;qFDIVBFFCZST!nD=H7tY1?J1?*R(DZMoB=Jv<& zT(&rRDVKw?HfaiW3aTujB1H1=65~xm815>)6&Oz@@tuNERy4GXfw~6@ZwP9iHzb=YAP&E=jiKXe?uL<`z!E<# zfhBK5CLR{`xeF~&LvId)e~-7k%h}a8U77IV_L~WbBws>i@NVgY`g=fr)jn_sJn}%r z?$Ki{29&91UNy-5jtNs!G5NDaO(y0J>L&S!vzb@X2|N251F{}WM)Uc>a4$NI(ebS_ zo5NuM`u-)j!($ccLg`bW)5eAs2(Uf8cHNA8;TcAOf$y#4tMDr|C@~^xko!J?onX4Y 
z#O!H-mcaLGK!^U^kdp#T$N5zb8v1F$$#`qDk&@;PpQv<<18vFCYmK`r*$P4T%wLOlRy|6Vtee~jU9t=@VCvfNZX$9+if_`MIkdF|Pg*)k zS~*MExl5b6zq6^A9vI^xs{(yxuhpw`T&NW7c|9Jxj}3!JNR5M4b^BmhZ*lc*YH@zR z2M>ue)hr(ZZE{E@vuGt@)@T%kZX}$U@b+!!oXypr=JM)_@ytWAL?VaaS7hu>YQ9R( z?Jyo73Z-*6F1bxqluxRrBF5Dd$oYx!Mmv)=T@lz8llOCCdR%^C`)ZA=kyw#FrJRf? z%-h4Xi@<9!M)zF-q3`i|kIc&c@y2CmUgKuJ|T3yi(5fk7!=w}7T+0iFuvE$M?3H-K~WD{U3j>Od-U`5hBq zY#E}t7LRtihynpz#K)m@TrE;LyY_t(MpSr{H54^giaGCb4?-7BX}XRE-{bR@V)kPn z#eRg`m^^RmQOe6-_~2KJsxCC{OFJ)-Ig^tBcTou=DX1t9xO$UVXN4If6~IuqxA4YB zwR}o z!-wyRS25#t<47rQN_bHF<){KW|7X+u8fdV4>I;Y*_-&=+PWsn+M$q>M+OmIi&xnVL ziDs_ydE{sW=%GWy^8rktT$Fb4qN18@;*DDz^!hX08X+$V z#b}UcCfGk#zM8Yu{V+ctq5Z&7ll}ixq(?dc` zBy!pq1r=R}=21pjG-}kInI}g~ zX)o*0yz6H8ky@tm=&!fag(&6HkayF+Yh5F>LvygSDg6x>wI6A9(N9pRwyYwrY%nOV z6>hGT09x@ET*VdMQBBqyZA&&aCgpa9X{kLDpMB1EayNRCGy&X`26RQ!jJLZ1elVGP15e1m1Bvb7EGP>!5dru@a6?b%1 z!oeDPk9SMgnYf)-`TiO8F=#zZ1n+kngWP2sMToI|kN8?NcZi6|pOlhX+oc$woeW-) zY=$_w=d|zfT*Y)d)<}owifAX^3$Nk*T0)pw0_5a&fZJ(9Qigpwi0<$;Sk(0Z0c-z> z_Fl{b^fey-woe(dh$t{UeL7AGW zLlxh7bf$H}T7MzTXzyO=_0=z_&!leWwepgBdvU~&H+SIu?SESeR6bQuEWlK*64@Yc zOY@Rvfh`nT{r%gSwB5YUdOW^=`=}45$6EzyVU@*{zm`~y9^TFB3@?qnSF^|X`*!1X zWTxvkk!bW2z@(gLO=&DCE)+G*bumuKn5-aZ?`*6+0IY>rV>C_WG$BV{E;@C#Gp z$1>Dl0I29mIGWKpktx2MEsG+34W@iwRyGtg5*)I9F*yD)Q1^cs8F)aUz-a|7bMeir za1%ba5TEDG2aKHnM{?65XUYHg zME~=1Rd8dCh54d*05zru8Gt(dNQ0|GZ<1$7XqDuxwG-uK1R`Hs!kFXsh1^SdEv?|@ zHb_Go*y4|);2(FPKUac(Hp@ze>?j-;|9TCBk?1VZu(sX&#Et|J`oFY6b2*CQuLVba ze=nHwbemfI$t9X3NsNJ7BN2|gVXD>AKy`ED9BD;yw?l0!Ob!ca8Cd?DBUULUFHxnH z;^JX%D$75vT9*8&wls9Oe77k$)6YGT?MKuB?(`3&T6FrhINP>lYWl;|rhL0Rmz@`4 z~XRlJv$hOWhQqI^8{|tW!JzQ1+=Ln&nFwp>w zZX$)keeCU3r-6Vx7EcjWNgQ)Fnz60Zxxpt>wHg=cwk97B>}EPnL}aS(@RHN*VUKq_ zb(Txu5vm(@a}_U=m^7T;3@wIQTV-SRuagvP=GZ0=-})N?k$G%^iQDFKQ`GLVu(plc zAIua>Gl8GvCk$3R9V1(8Af!ii?b`!x<@N-m*!KX}6e4!gy`HYekPkqNU}9u*ZHV38 zP5rkJ!&=Y#X&tc^(j7VRpi-%9?x4O$CzXw+A5XQuzc6(KpV8T~&R(Y}d!bw`+;4r9 zu7TUsGN$*vrw_gIL3+w@=nFRJ_sD%|Xn|}HdTclj=tF+&OH{*czTrXsR|f7uJo|am 
zif@5-FP|Js>Jv9|Q9e#}n`jVuKk%|K6{6~syt@k%Oj}PXJx7lZdeZrTpF*uw>_d%U0R7+G?8M)y%Iehf%Q#-)x+dNCW#lS=%jt~kG^Z${q>tq4T zC#h1;jOa)e{mb_X4Sm6h}8~efHu3Qtoa~!(-b;rA^7D=1hvx`^~)QC~{ z5=>|87lb1O!}boG1gA?C+|Ui$4~^0LQ8mvi3DKdHTC-)LURT2}rFRx?Dz#B4L_ zry5ogXA}9hv#QJ6;F7s`!*b$@W01Zx^iurXw$}D8hZtF`V*LU`G)Gd;+FA6$FhzVB z>8BE6LNm#?XanAC;rN|ViV0p<(4(Pn!CjifKaJ{7#|8fawIAI5WRGfxJ@JP4JyK4< zB~_Zk3qjIhr_90<4aoJ_2uuHgs=xgj-@MWfv!=5dg(7_%)-20VXEg>_3B7;2^Ik0M zl_^ahrMv{y6?>YpKYGOuR@av@F_6cKh7PQGG|m@8qvy1zu*(pSuNJZYx&yPoyRj=W zec9ymI8J-@ITSdm^(y)@m)3C*TjPacJJ4RAHgHNAK-@%090)QQf-sejkHeW*BrN0%*0BPlJ#*zrnr?jhigaoIrc#n3O1^Ba77Kt;->l z+@|>54-&!8tNH~u4IcUP8Z8jV8*S*FfzRVq6>+7&O9@i!a+BOj{wvVGsLtgn!}GWm z7_NPT^}0u@dgCx`m;w!*B%(OSy{#-9)MbLu1}vJv+%d&$FhOWqMoBSF931mA75g<{ z)3N_v_RRriMM=J5ca;1txR2obo6>`gpT#C2pUyt%A;k%?U!NZuV#inoC!+Alha8(P z*TAj+q{^$+5&rhXboTk>d*M&`)t{FNZx7f!$b1%u5q+$3jWfKmNO0m-H64Nv)E43Z zbC_@2ynV2ANuaVedPpVt-(Lv4bHpw!j)q!vzoL26)W3xNI-sA?B<&uI=d&iy)6i^i zDbK6sipXz9!nQTmQX@aG!N@C`dF@toWR?q8pFzNUzy_XRU5E+Bhz_@&nNKUoTKNSG z#vJ4C@!RB_0DFk@m!$NMS21sNvXUr!Y_LmSqXaqGVuFfWyO9^8fI}%1ZIx}%cc&Z0 zJfkvjB0wl!e%x zJ)$oqEhCV-2EzW2eFCuRC4mlfpQztugH(!5y;BO?x17SHHb0~K8V#8|m-cJ1QGqZv zKKt_?mbm!WpoJ`Q%^~dCg~bOFAJYGxXo6NJpir8*-CsgLX`dZu(9Ikt+J;DYX0VCf zwNzuzYo@_cm5f${B6lhymq!Qkc}-T)oPwQ1q>>-?NSP^%lWE1?nMkADf<#=2MKX)IJ*xdgNE+@uJy4p?`e0&;TxZmd<@jb+ovtNm*b$h_;wC9qmdO*>b zI|8a4hHD|)uI)dqa5`7(5Y}vs1F43?H=Dk8)Y<-x(GmS4RGB%QAiWi4n$tDHtpd@0 z9mA~xmYQ*a4$PsGB%B72_FvgVOm>{~o~1jy?^UP*wW=`5U>`&eJ|A=Ef1U?w$N2t=*N;x5dZOO1;d8zxZN!+*uR~>=I5jd)N1f%us-V`n{Ht!#tlHT z(WTK=_Ca^)Ur9jTr0L2>-?H~dQd2_&*$(Mbl0{VABQO+m^`80T=DlDNfKDz(j?n_n zIY;(?qcfcV)x1KnsdoBw?kBEfTmMLmPx{Pg(JWdt#D^X}MS6W(?C#&u$&-hn`RK|g zg4f1~F=FtlzI|$xq3xo`e5k$i@I92V&(9acI=)5Ov>^93!y`R+ocNI3h4FOz=NNVt zKhz&ih>7Xg_2TaB!<;{2Sg71;QF_;$v4M_pxW<_@SIR3c&kWCsDbST9!K&f}!;wJ*1jy7?RoFob6dgZnB&0jt-J*@LZ|4j*qMn`f() zGwR`jx~_isuVAb#i-7_`3F6dLuZjJwiE?X*iGsY1(p3U6-t>%Z4(1?f+!P2J8L8n1W@a;a5IAk zh|CRla7=?ck@EH*8+#jUhvMnLS(GuZBq`w;^IVJ(+-?5CBK&cN2~$SXO^PVk0l)t{ 
zC5y?NHeI&xE*mmePj6Vh9f*Qt4Ub%kR}TAj2!bKw{S=pqDe##a{4s3 zKv>L(R!}8$WzQ8E4|O-`&cHe5j+yN8js~_tf?B271H@TriXz!A5kJ_X+YQiNj1 zsT-OA-*odE^XA773pcxgA&aNnA9FLtlV8|~d^x3%b5tCu#cxM~rIr!Lqr@zN`%}_d zhiIiW{4pNa0#IH?SS-B{oOoO8!_4I+o^~a=W}Er2#>IllhYIjoyohlEt~lHSUMNU@ z22h%|P;*ro&w( zLLw^`q1A8?xk=N#b-b`;kMqV)LKHSV${mA}xeM&X`eQW!n;`TMVfuPw{n-6!3N?2M_SWN%sO^wk&o)4ed~b-D+d`Z@bG}9!JuN3Fq(fcz``$ z+@S(@_?1Qzgly)Qy<4@Y++KT6P`X&j}>Jo21Z9K?l} zmkZ5DCUJo&+8JWUvWRjrQMzNcQ9mIJGr)rZ$~R>{Jb1*g%NAxPavLa<3k~F!T3F|5 zDeQv6ap@Zo0`BNSx4NLXrg46h;QNmQNH9>&uW(}8l5kLxP>n31Sn|!*F1R{saG;bRyBCDpB}7F|&=z`1!GrQ~ZMPmV=x27VEB5=j!cm(8zF*M|i88m@sRSKgpOL#WT_{Ljj9!`3uCju3f?BOMPut8#W=!|DsM{uPePAm%*UyvC>Kk#4| z{%5#jOSPsl-m(KKalgBUbBsHKM;PcbjztC@Y>56>%$5PJ?o*PS8OUa9--%q2K?3ID z?CrV^D~Z1+N>aP>xzq{!xB3gg2Z{lc(=gx9DMLRHzfKLKtNs^A;A+Zto83LsNi%_~ zIxELU&c`@Yj#a9?r!UF@+=1wF}~AIHc`l>jxul ztW*BIq8A)c{O7QZlnz;GfY+fcon|Zgf>W5qcy`#_Z9fS!n=go(QcG@wBARBrxOW#< zgp|J}UTUQjtYp1y#}Pad))~PNT%59-?Y2_?J2OpUN0oAgT*|2uE~SB9GYqSiwl|q0 zZ6y__Q8_VNVw~mOd3atF`g_QV7P7C4^bJ2Jro;!=D1`oYC3rmkUJkU1uA>v6@ORSJ zRU*SkKd4w-Y;xrf=Qsb1ZjVnC3pw6I*gdFpR*MElYv+u=eSs<;{~I{Qb}uCSBat^< z8-rC5Acq^Sh38ck-^Z()Tgw4*->YjA$|q04>tnrmm6oyI2#3*uwwiHR_z%DHgTmc9 z*9`?7F&DVsI#gi-^UD1u*ItjgV`KyL){Yj6IBgV!i*M_VMF8=Oq~HNSn;6-F{J|C@ z)K9$T30^1~5n_=arB7vNa@+9vYT5d)4?`oV?Le2?BA~*7Y_i`igxx;way1qwGBP>W zP?2-=%8X^g;gX-RWx%ppIOazIbx-}!IApSyX_2qZM6Xv2Dqe|cb7LP%IY}r@YPSpX zygjJlw!4;Be09qk-Z_DTXlxlm$JHcFrk`_XAVPWUYy-S3Q)bRFi(|-v74u7`tcwXS zK&wcMq5y!oRd}#3@oSuL&1v9x3PnpF5?!QB!|SkP`_h4+xPg}mp6fIxByb9J$GEN0 zA!C41zl&Ct(x9Yq+P&gy$w}@wQcUKen~>%k+N0!^9;rYDJD)86@62N1Yk=&Ex~w9OP+fO7@_5HfZ%|Xf7-2F`MzRmqVu5z_V-+HGy|(TA{5| zl)P!X1??d0ZQ1=&_VFM^EyNBrWGz$TEx-WNiuN}gLq|z?Z|lmklqGyJ7trpP4ooyP z>n{`<)Dn|n@LW!Up6G0)kiO}N${74D_<~S%FsA1m$$~$7w2o)5t%#AtfI1#;CPUe z$%?Lj$5yB%PCN@5+3x4wu3XapIpBNIK$H0z%kzEj^XBK%0&H>~&Yii924kMugP05G z?Hu=W(KmxW!iD>c8k4hGhs8*)(#-+&eoD2H|HCloC-GyT)gb;_#*Tx_Yc4|GWIh9! 
z(hpT->5{X6KF&iTVP57d(WgcL(L8AzHhK~`pmbu8!DGL%rdEyQXTNdL$>B6B`c@Xt z5hXo<3oS9cdxNbSQciPpq4&p~V*N^Q-EA2xP@!8Y$rlHV9|*&hRBsh^XPOd;`=kw1 z;WIy?0ED%UViIy$p)73>i z^1T!C)-#*T@kB?1`=X!1#XZu%@zK;JJHmLm2;{S*zzf^EBp?z&Rd(S68aZmZsZ`& z?IL`m>1#->tvv2X&IHx;d(mFH^BD*(xO=^NMM$*VL;st-w@ghl*=mc}!wL>CJO8tA za=r_!;3@^jCKt@l*jAKUe z=T9^}xw|L5fxyCgkL{TIXd4H**dgYB^7u)|{#gPZyn=C#S@LhqeEC!9a-0=gotIng z&y_%3P;Jya>kgja9w#HSqEsL>Y2!Ft3BkAk%DM?YbZXBOJscRBl! zIG=h>G_57O&pfIiX9Xeu#agdsXbd3y>1XOnYghx{86j5mp=IaeW4jeiSZ*isY3Efk zBg)>fFq`b|m>aw!L2UxGggrK?Wz50#{o9UnYwkpDQV6#(hmNf!-TU6B98p7Fw1HI|8 zIucKdh8~e?vc=X4t~{PT8GGpy{vT@X4PH)|8Q=$fHaGe*33ihn>wK9liv_%?v!&Z= zqmVihK6XKN?N_w(WO zl;&!rNR4-!O?U6U?b@8^wzm&d(~VxLf1N_y3#+UOU$Zl@6GHUE_^qFtK)Eor8<1Mc z5!~y}`(St0aOQ>Blq>{?EBgKb7a)x-d$!tEQGSR+75|MLOM@wTOY8V>49j^A-C4E| zdHL-U$yB$*)T zi#vW4Y5c}KjmMuoV7WY-c~NJ#?O16zfvdnI9@4k@t@rQHYMu2Gn@*R>>=C?8HJCMt1momqxKvGuTOp zZFJUyPf*@Ve?@9<8j%5|REYYw`Z$ zjopdq!nMz?{Uf_4-Q!d*2*XOiW&Me*mm_2B`GxME?ehg_uO!wh(E+{{-5Mn~;XsT6 zb;E!sEJ4MY-e-|tcx@nf{tb?j&?v+jw19jE_lmsGrr8Mu{8P7 zthp8(rc%X?qhQSedh;RxtpQn#^Z->Q&dwCEQCGgoZnDd2u6s0%x*~|n(46W>7@1~$ zRQT29fgYKV;+#}XWsre3`WyXU2MTJ&*%9|kCNr8_dHJ-&eLv8dhZN-e?M0o-j2t1t zfC#6mn-&+|FHaV$$4pV7W~dRmwDdNG)Z)BxVvC*g*GD;J;Vy_gGRKc#KvI-7m&ckS z9KSF_sbW&YHVyto>ZP|g^SU{q%A(fvZdV2{n0-E0r!qxs(@k zN?F&N4<)TivbCu$%gLdaZ%+hwT>c#0e=e=3*h)+<&TST?#?Yn1x)&xRDyp@v_|sUn zUdsY$83YUP!SbtR{Jh@&!Zd;K^TJLW>X)7C+AivIeK4+*=U!4>s-@ET;hoqXsl`N< z`Brd<+l(37-cu}d7OfYzj*N2hWL&ZGZky-t4>(MU2H>1MHb`d9Sw3InOgzs2%)k5A z_J@iWFWgK)wQ(0Q>(O80_p>Vv$%$}aT@8)o?fnzkSm!5Y-wO-&H5V)gKQ}aq=f9X! 
z^%`pGA6zypoX&ec30`lLJl^;CEVTGg&5a)>EEb#$G}ewmMGolLGiRuPEEM`hzj)B1 z>9^hywFHx?M0+SG7cd(DH+PCS?m@KElL+0HtZp$8+Q;o?w16^V{og^rl1oM~rk1*; zVVjCWJ1iLokaYGzYsaAps}Tt+vs+)>+txjxf13CH@LH|E zwSjOm=YG+WEGi3GEsywUi0URSl;ZZLlxUSzHXi?8&4@sL*m;7s;+>nBxO_Cv0vH#rm}ZgXJGRI{$0-5+Bteq0D0Ra>StB|`y9)Ljw(gWo0B3Eb_sv}y^XK`I z;r796x!>f!(5Zz5c~{~$P`$QE zVxD9@FHP9|Yq59ovUAsmyg|z4L3~==XWD_$mlm~EmM^RLV>A23{$EXyU2E3I zW{|LFEDmofS;5ViIB)w;JS9Kx{1y$_g zMsw-kMx1-_DNe`oyep|jIgS4Z$!4rCR&<}l@Px!D%>zYo_+s=4s<5%$(RA8aCrNSm zQgozL>*>#)L~JTTi#@4Bi+S2*?#2Mi4*UCMPKxP0Wjw!)YAl}(Z9GP+h40UM&rR*| z_k;V9X^4A9GI?XN$uGgZgx+Xy;Qf9*=mAl`4oMRMpcu*k)@MC)Da{RlK6u$VO_kLeU48tbr?5{}21Fy7tHD@+?#Gz#j;r%(02 z`E}ca@rImwdt2B4U<^6ENzS@btnF`gACOGwp0+>sSv*GTa;i~cdpBwhTkLuI4LgY6 zp4);4Uxh95OntKO`^>Dv%Q4#?l!GtE(l~0*bLyyv546v!PA2Dd%U5l6qR9;p3k)$%_kL3Lu+%4X}>HNvUaz$~%8mO_O#DSxZPIi7)8pCEY5? z0}Bu{-B7arR+2Qd_=H5k;+Ur}mXgO^1l{NpC+@U1t*eRJ-}yt8P~UFZ8|=7M@xL-4f44%@WW3#@yP~v$Hm3ClfVPss=K{ zk~I4I|NZ(Oi+z17^VSHrO(}2~cRO22^<$eAub^6W1JHX>U0a}SzTOaR`&{R(Q~TF( z)xELOX{loFw?&D8wZjh3XkcL{kgJknb7_d{8Imj2q~?hba_|)6Cqr|>ZhVH-@z*QJ zrP}$q2GwR>y>8SdGbZ#GXW}y;VXYM8-6LLt&aB19TlY-xK*k;PkOs6PiN}XnRwktu z=U4Hf<4>iJ{f=60EYkyMSHRKzJ!p|VM9m<0Zd{>s=n3COj~_HGbKTDDN%P8`y;5IS z;xTzC)w=30JbP%}wK2_Rxo&W|ZeRth2{$hdlG7k)Xba?cZCu$Oe0Pt{+gXb~9D-TU zqOZfdbgN@nf(V$eJQlwN?`tr>yZlRFMk^kL3LQ0tTXvfK)|Ul;4JcWNC$%4o@wfk7O-yMaX#D%jpW`?{b3P@S7ZK#{rTLua<@2GYkDA9kuHJ7Bbyk~ zjB63T6ygP^`&H22S3`fQHbi+sUyOGcUJ5YYc1Qeaf_Xd^GNMBhlCt=AqkEVt6bQ-* z2HLKQKPtwz$Fkf^_mV?&SE8G-J{SphPsfIp&GVjSNA46;B{U4)jTV1!3U3>wCV`6u z=!BjbuZ7O^ZZQPHwG;*eLfrj99`)j4L6nTNu!@K~K%9w*=Tr8)+uOB^efqK+Tc!5E zpvGaL5YhSO8Rp-Gn48QFJmg+n@V6^C#ifb<`&t)6d>=HUVY;{<4KDeFdwsdqL=^w? 
zzJIFv2NR5C}eCR8@RCyOvzQbipeWr6)p~pLhLA_ugZX$~WprSaM5l zC-(-a2a}Dh`AmS63B~5T_VQT1)xmhC>+dnnA4fEiLE-lk@N}34lNwgky)fe#2JGck%B19@N1Y@X5 z_Xb~lW)JHrUGAiJna8WyO9uCsLp1r0$M`jYGD*7UVP&P9BX70TFv_FrRP{c%#cHT# z<;M6PZnwvn$ZKWO#ILq#l5Q{;f(0T`%|DI~HpgTEfcwYpbhLm(R*rL_0jHr$IljoE z1JJf}AW%}6a?ly-I+y#55qYiqWm0{I>YXH#V$IL5CEd)%L$~m1I&w#8qqk&2F{oXF z(FV{tW?;V-p!}0ee^jw0^xNPTG5KquSRQpwDK2&O|E~67+c@Bk{vapBEoxfF-b;5n z1$ZCJo8yfgyDUXcS*L6=eipw3^4nap07eGYp}%qs7MQ2yngFF5(F!fu7aZ9mP5kaT zG|iaSddKTdE8jhPF8^<>Ue`(7j;lfaKkK{yJ_U-u9g*(e8Imj?BQp!+x${anTonFU zU+K~z@e~J8l&c@nX_~J`3*1CKR0@Gemtvw{Sp6{ z7Lgz?7Del_o1U&J^DqF+g#OBhH5#GEV3XCmWMJWlT;C`uxnGM}T+?7&FKRc&#+fcf zFSt`APpM9B!Yuv-tE)}i*Y&LhPC!EC3~;sF_;;g8)N(wKe`kO^_k}(6MBJ4)?B;C9 zv{bd3^Itx&{(j>hR3%R0fYR%L-!_dw7Mtffwja#?Xj^dPKYs7~g;F(AUc^@!*aB-C zphv$Us2ZNbN#lF|C7C%-c!%QLlCb;nFTW>CywsU#JWA72ucXt2eN(rahmMtZdtiBy zOTxoJlc4wYOLzYOs>n=(lVSy0$=#yRX6T3qS-tt4Y!T~8I>AT_+$DU#){X*-6o|;mrQz(pHB5I%?h!oL zi3WYQ{4f*?rI+gm zY&*!qbZ)zk+x{10n$Rlb{l!tBttB zkInVzB6toPNk2HBV-L+SWY0DG^$Q>?`p0gA!1UGJ=pCsQ7giFW3pmr`+Y>Xj_b>O85l2fX%ASW#+DivU^r-x;3fWG_t z^kX3R6gDShkFThO)pT&-e6X#aP{pcEnV(S#SXN0iBJ9G$?cp2tL0JuM#};|&1oz0 z`U3POu^<6H_e#+eRY(Lnhf`cgfZjXcw3Xn=MunX^QumOWNAmxN$50BmiqAQdbSk@l zbB(wdD{}juy-@VrBY$r%5ia+cuJ3F7=qh~8`U6+ui3fZBlhB`~&|OmM3T4tMYe&&C zQV~LC$Y0D7Ztp>2VA8+)eANGl^zTA$*B;eC*|^-IKcxtqmOQo@hBq?7v= zy2lwEhCGD1eQ3D=1UDGN94|ITp7GiPR!{~l=O-jf{0g_voyTdFMop}u_4dM^PYv5` z!|hiQ4edRBDOkBqHQkl}J!t-Ah=G1=Z?Fq%!~;f=oUE~SQrFRB-aw^vVl=vKyj%Fi z+1zK7^P6~I5!+3dGx6L+F$HSHh8s4q?TQyz4$A0cf4+w96cMf3tJxO_l=banq*HweS2y1(@J4V}|ziNNs#W_0b* z#LV$o>iqF6waf8s0Zf;F$7_i823(Baf>|Ff5h}z;*F9cpS?hI3Uy(gc9o;J$(!!Y2 z70j(-x)VwctPpj;0Ye%coVNSN@f{Ig9;3{qT}XnMuyHp%%5Opj z8{n>fFhg7D-O#<=VbASg?DPK87E0Xi05279f-D5K67ek6y zec>qR$N>x!K6N0gEIU9`89E{$?NgQ4c*)5*rsc(GWJ_ zH1Xh*MTV#pzC_704QV5G!1c*jn=Y)aL#iuh;MHEQM^3>H)CyDO)y|Le<6L3OB@qO3 za*ez|YZ$2#=hTn1$ij!qB>^kdkUFsb8j=zjZ^E35vAchz*1o-Z=6HbZRYX`mBm3H(>b zLiXp5WU56hLp7N4C=19SNzFiek#lZ6e~+wrK3U7S87ubWvfMA 
z1t@R0MfJ5iC5c2a>MDBgkyqRalihKyGk$-ET?Ff0a@ooL-_7BN?cV2pjCto14id&E zC?Co5YEWbO-&)v)|DHxmp292|z(&8LeKDg)KSA*)k3Y{4*sL~`;=UJ)2EOa%%Z)0a z>b8N}pF?40;XO7%6E7&;?+sZp0K3;P7s-7)zg33_R-kni(4>b=3(ct%K?ogwS*yaw z2H#;9dsp0FZ(6Xl(K2SaGcCCDwJBGk#Vp!&0zB93g_ZKGgQnz!Dpq1gLS~IGKzm?N z;zg-j?bhX&DoeUue(sOIBW_?Khw$-r*=u!MSA;F)hfSywx~-rmeYz)L8t5~gnyJ<2 zt=H$PyR#rng|Fsa8wHM9yY%q|kUbd_q{Nwz!uPmO=K7UeQM9Fm*l*tm4ox}g?IHMH z=U07@);e`c0zHsE|6CbpvkFG!c#(Eo+W&DK%>Cm!q2##a{~GVgL+YZ&zklurMxVqT z%IA(Nrp@*XF<@k2vcy72pXJv_*KmUbV8}5u5@MmB2|(}dR=r45Er0|TO66B|&(!=5 z!RJCJoy(^?)lN1?1p80hTA&9L5(58b0uk-FE?XWW*!RR%Q^-1rE*S6>Q(ch)ivv6b z7Pr0>X?Tqr1Wj9IFWQ{M2o_vB3YlHksj(X$OcMj%HL<=R1?J8ep~9T5wm&M65kO4i z1a1jsb91%D^Qq;NX&Wai^l9or_}k5HDFeg;&KgGJ+84n(Bir}*Ht_=x18&F%9YxAZ zq+WvUs1@LRIgUOc7P@{hWb%+MxbNUTRH$urjCGi#GLf&QnntJU~j z?=3?TNvnC3Fl!>n!%2OITBV`*7}}LwAIanfU(B#DK^#-s?pB#Zzt#|pTO{=za6uJh zHK>OVLB6lk68xAMhtj_#V87`I2t%2%vW`!g6bJHgBFwzsV99XDpAzy;?`qNv-?Q3B zXkpg>r2Xji94`atlzJmr;oUr&r!s~ZKAgAa?s9<4XRQzMOTGRsBjv@p?q;q!om|+# zKsRMRTUymGB{ao8BDZ|FvYKUZxWlXG*o2#&BWJznVk%hLKji&arJTUK}A0HSq|e_#K<;rLaa zvV95762u|uxx?EPB&vhAQdsQM2w&B717uyh&6>0Y_&NhK@S=JLFU*l9A{(3)FOngQ z9YG3#Vb#y#eIG_ih5yzN7w|H$A{Y@nyV)i!1%15$-c28(_d8m2w)Cg1o3-QuM}|9%0qJ`oj8h@s)1dYk0bBJZ^nnMI?W6# z9f9iUt@AzV6$?fep!X{?VfltqH@f?}rL#`l)X)Iv#ez7#} zeg7e<6L4{;SYRo#hw1fed*aLPY^*(hO#Mz0;DqT-6R!kDsro>mkC+70qx(9DhC8-_ z+w_-ITinUFV@z;)088kVRs0Jsk6)q3EC3JT5`oj2*>Fe1OyCW~fo!7H+K*sy=8$Xq zEk)8Jz3{e~lGW{s4qb9S3RAN7$b8$Y7tV_wh(;&OXN_Ev163{k3vbX%$O&MU6`iey z*75b(pMwrIpGdxb_2wUvSAzqTo#n6NCG4Qb@p*1Wuhg$>?`4=?aAlRqiezdpQXffGj z{yDicTPAyG5jR|^tQS9);ILPcX7W>tZZSJLhU0-s4_Lc%;$eq(N3Rh4_z~lN>QCQB zU+(05L+0^37;~{=%Z6)MY0%(Zu1bzI53Ct;k?=nT8v|+$&8fPe`UL#xS2@9%Atq!F zCHUg0Gz6K%1WO z0d5dnl2Zs;Cho*sl*g>YEfl8l?flP=oZRHD5xMoPbqgZixV5}=kt9^(bQ?0u8gu?B zu=3BYAnqw%&ff~}tR<;%1i>AFqV9%$^y}QxD=&L$vyZl3Nt^ zxU9n+C9cFhek%dXnx?$aZz?zNyutnF51&>3pMlDC%qzc6)0J~fz@dVaPE*>X#@p0o zQaOwn+k;*N!%3eGm5B|4ulByp2Ex=p`lLXuEG*bw$M#}u=K@R>Taz|iqc>bLKjbKa 
zH8OY8#(`PF;nE2hknl1RJE&d)sX*g>C(5x^@Mc3KHWf4n*ta{AiEzjZR@z+qX*Q*0 zAC1B7E1;vr7O73}(YceKN}^a`feELI0NPe!Ac{13sxC;g?``;6L2UPiRey$;AJCcJUgkC@3IZ^d zVTmU&g{|3z7kWNHO|Lhac=|`=W!xljqSjOxcrNLk)dQr7lNBp|OUua#JJ)g2i`)pj z%(}*ql~@dXpz91TTxyRUUCN5TsP{I$_(87M7ObUV2%Fn!KkhA9@$}iTveP31o`0C8 zYX#so;RejjSU<7}s6_YxKwYgaS5Yigi&=z91bP+H_9ej#fMJZ~5XB==F`ggN+O2zU zDS%V$_wZAkF3?cSJ@CJ8^Tm|8vJNSgq~Z3m5^!HwjG#hbCx*Mif8}2p4|R=Dh^*7T zKR4gHuz?a+#FjDU^a3&3`c2Xza!zX);|8=5tenzez%~2Qi7N z<|2C0H-?J|?bY-M&WR=Ar1l}uz61@~InLClqF*ZR1B~^LSTRECyM4SsR}miz?Pz2w zFZ?9@=WT{gZgOPuh(O&m66k%50_*QMSviMi(B3eiNC4ePHg@`F(mc%BDk~KN6MRb_ zF;ghlQ5@6xD04Q~lhi1js4jCR+)JJSXGjgPXyo7s|IGL{E1RruF3K;%EHwh&nbl)?r?NVM z%2M-mZT;Rz5SsLUy6+cmzg(Q8+O9Nd2~lVqRI>W{f5;`EBz`y^v2tTzZl$XBqX|*d zf23e9&YSDa?C1=oD&ii?7z>c*6Z=MwaKdlu0pM4HKu+Lexd~zaYy+rwV_55G!p0GX-6%bXEvORle>-5h4 z$Ga+?0<}WfkdJ-;QmUHYo6$*a>2+-K&F1+&ip5}L=<_*!*ssxF`JumTB1s9(Y(w+0 z{+qt(g4&ffdi87{f8-3K5&G6NgT-A)&xb$^+G@pcy=!(?OrC{zpA0!c(=WxlUVogqplcoAF5 z6(KD*V$G{vRLFin;k?=@Ej1;b=EihP1a5p~tVS-Sl^As&WAr7}cDj~+%J@y>ei1F-Hg;hZiWp?Mt z7}1VC!nU^TbE1dfcn#TJy4nb zCqsdX?E7;E=${7>)8EH^+-~cp6avlE`T=+vPzk77wP={+G&|F6DeTB64xwWX0vcv0$*W=I=cn zh~oQv!@Kd|o&Wqp=RgaDSltuagi5QQlB3tXV>6TGNdxfxN@|oDoMH1^fSBEZLGVVF z8L^#a8}bM#fgO0?u?GMA+hMFqO=_3@);f`pHZ?Xx^)Wqc=D-*geZE)23oSl5ur}4+ zL+_%dK2LM<9`W|KwQ$ZZ_=B52n_oSNdwmM(+7fKenC|1M5nK1kq_~pBI4+W;9gC5= zJi#G_MF_x?`)3crQF>W+?`@a9dea<33iN~|EEeMbEhoMx! zgY~>-7SBlEKuNWjcZ+%4X01zz7=Fi(Y4V@-ih9Fa+kqQ;K? zloPvoCd1Eyj5iE8o(Jom1T|-@v3xCGHAZVYRDaqyQ~r~YNr++eP*!x#Qbx3Ag;x_8 zbft_coBU~&Fzua(a+~(OU|)RT0hSogSVpp>9h^Z_IB)XUf4|qVKsnW;!2kI8#PW{} z$){F1r(JjSO3 zcc++tZB8x&17_X(6UrRtj2y11=$zweE>WfHt3NIMtf^<3Q%PFTDvtl0r2l=mnnrh! zsoq*=l8+Iyx?_y10G?l@H4J^8q)Ur!8pG{MWh+Ip(sO3k9~k|--u;5$Y5Dx5cU9wW z2gJYg32(c3r^PZ0R*)GU#v^f?`+d97{n*$XohdaW!G68KH7xC7<;%fdEM&u`#cLk* zW7hTf#%|x!9aSl*=hZLsp%E5cfGUSHu|c?}U=W>d1-25sJ0;BtL~(FmxyAfN%`Bf| z3m!d^Qq>}}#MB%2lhd%+2K4?>A;Oln80yN>%bNLeX#+WwI`#OxHg*E7bQZOgYgE!? 
z?iwfKiqJt$=&&lXn;*aP8-Gd3N9!~tzP7*MVI3o4*edh&tY6CkPBY%dajkj%=+Nz3 zRWVmuU=*};&_?DS1LD+p9#h9&D$iNKe8YO`|A2RMwr6TC-<`)th(B`s3)P^O_}^e**J6I5 zs;Ao`*zdhMV(%H5G6e?6mbG@>8oE4Z&)aHUDfmZeBFxoBSEtIjOz+B_5RW!5wUv&7 zHh?GHcMZdCQGyt{U*{1lvn^bvd%-Y7ha*J_u(_THckKo?(6y|ml!`RYTrz+ZZH23$ zH?QO~eA&`L76IrF`+1sP?B9rhe|m_-9L$rcp$gY*10lAfUIfg5yDoqQ?`S4nBu4fe z2%X<~zkf)ot%NSUqR{82MzEEc;=I)}w^HBF*v`|k4CYE`&zcl_YH2cS z9S3J9emc;}RNmkWl2vv2nCAASz_Ph>4P5pWJWLSv9wvWX?5qEUqm&K*bDBQ!{Yla# zxr>i~=#$DrC^ST|VUgXT1YXNG3$hX@Ngb?NKYIS|AOi$S-)tok-!;`PFMkc>fhpf& zK4v^Txqdj{OH@GgU~o4kHP#08?4=m9Li=?_R*a{%HUrb%) zS}ni4-YemqXp`{wA5D^(gWbwMoSo4l6s?;U?RLzs`V3!!nEB~7eD=*@QjQD5i_^-P zL7k;v3K6DFU8UBc1K%vT_^2olKV{TVwfxYjY@%jy-%-9&sQZ@D6*M`D7BgV}Ded`a z_Ak&hv+UVN7@DWcbtA62{PquB+9dT1b?L;yG-GFnUh_jy2jt#-ASTC6IE~Ses@xFE z1-Ks=#yy82(}dHQ<$778bNS-LR>kgbX1&5%BKwQZN;?73aYkBTJIiX#<-|nV{U$Nq z5^E7TB^B1~1tL^{U?sLXj!{A^v80k;=Xt>^1(aUq|EUsk&{b zYV-`EKI@!+icP$Kk2HSCpL9?{tZ$FB&tfJ|**Y{kTWSmE|M*|fA+=MuRw#J@=uCqX z{Q_oUNusDMfJa8nE5kGC&6xBNm6S?s-G;{O6^EG(hyKjFf#A2b%HKlzT1_`Q%7i`Z0i7vsaP1l~3{q73;N2DZ9j@;+>k@h50L zGH2F8XpT%Ts*bpw236XWfe2W;6CJbNd^})Sgj>pvtnF{0t*@IGloufJZM3is#L8-S zSAS@61yRa%=7YE1lt)ddWqA?HSzfI)wYNI%@69;vLk|Sc&Saaf_XGqV58QoEdO>!2 zV2Z&$@yO?iIXypxfH&S(2!huGDJ9Ma1-DN$MyJHJ?{^s&#ClRg6ze4y=m~E#&W~WQ z_#CER9oI)bPqw@`MRL6RYm1wwg4!Zjz?BFF0xEnq6W4t#AVmxn@6}M-wNq`fm zAMnuJhs}fQwrD7(qNPWtDJC*Oy&f(CKn^3KgGO(mXOBXbpp81Qn+`JLOPdHkgkK}m z)YvT~UfbxQz4@C1E%zYHv)j`ot!EAJc=1YNDe#Q0Go(kqbEQkrWQshE-}XgHxI|3L z!yLr7S{oQ|QFrII)8OXVhiDjl4&flB;L1TxEdBS!lX!hwlCs&uT8V)l=wQV2$xb@B z#~&L64Kx8{7f7ar`X#QDhCs!E<|K$h1KUOQK;L1Xg>eYEYc|;- zJ9C=+4!<2UQ_tz*SHg1KftFunKv3=|$B@8-7;x?B(t(Gy-qzj?EIIb+-^~s)?`093I3a zs`jgA3p81&#C<`vq}&V1Lwh)ubq7Ck{3)e{20&<*U)BWEZ9obQv1Na2(c;Sfo_P7k zXwFzW&(&|5d5Uwytgqx(uKAYyMeGf~yT76xp7;WJ>}+csZ+I_#dBK z!f2!#*{c?0wDHfq4N!T@H!zU?l={6%tu%4o1b zNS*(ju7q2;@Ak%yW5%oHGb@DH7kJuXB)#Xd*O#*BPvZo9xcI&nLZTN;$Dc$|xAv-G z;@=2G2^a964w?#fiZjQ6b==C%9C~<1yQ-kM=tz}87}wt$BHO=@I2uj9P_@iUCmpBj 
z@CS;kTmPY@7IJ;@#pt8>?Dy(6&*TB*Ozmq}-Krw#2FnYc4S|=2qA`P8gfCuPTCRV~ z1tu8`b{qD$=eXqzyXs?N6C46Xd9dhED$O+-pE|?}~3*w%B zD{t+LsDe1oFd$Q*fNpZZRG0tJiJcDcS^{|=++NX`fAOQQ*rTQc>#4QgBgXrHSs>TW zD0qfk)_4{;>O6j-0=)8TX8i4l(vAaghl;-9{u1p3^l$79BVe>VzT=Lwr=9>10)Y z*=vbr`mlf!?RAYndcPBgeTks~f)bbouzw7!RiQ$w+Q;49U8_bjc+G1?U*un7jEXz8 zk&K~Rrox;5K@|b{qtF0FePANzv}a-A6tY<)AgN$3|C|SyUK?}q`^DuY4vLDN6Vy`y znO8uw9z`_m;kFY+y`MdV$k%0ZtIwW7WXF=ScjBXw*XVuoQz}Y@|%{ZiNrExC*FDY zxLhf>n4&e4FDZ_+Y}i|0q-Km~k^)c6&0b0~o}5ye6&uP}OTOydOyA}~Tr9FF{`C)i zGTHYgxyhI8Yr8qnOR_A=XW*etkKDhlh?f!bfe`=Cz$-j)X({w$5pipL@xN25O2qxX zkfQd6sPTamDpM&mw%b_^V3+>Bh*IvgckX=9mO=5z-pOnh@XE01LcW;!mJ|CEJC#Jv z=vv<&AxU&deb0)FbRlht`tRfEN3fRc z+g{;6eEPqSf4Bh;mJAU^rf6U>^n20G!4?MyWWq9iwWE zuWEfRI?0Yjs&Q-W{9Ip&Sx^eXJ^W(glORl|gBGdBm-9~kcqqkALMYn6?7CK?awShc zEwBvjUxg2j&37%_qBgwpUIxJ-oarC2i)riZDGNOW?s~f+l(f+(Kf^_NHDAK=IH9BV z%PYJy@B)sYMNQl$C2VLdGQS^zmAt81^3!X@pE%o-&sqD$B2C=07RxNJ2Iq>CX2fxG zQLhBg!Sx+GWdpX>}|}`&u=?EoSnLN41Z3Amz0ISWkJN9 z5#<#-dztZ3&MnGMUE_Z3Mip=Gu!12V$)$#3oE@ULrhuhl)iL`nt9zZ02 zRnS0dch*9#&+pm=J>YLfrglty~IdX!v@!9qWu`?1*=_4td zZn43cR)d!IVLhd{9?y}Dw%lm)D1aEJ&eVhHCiTm24i0g@9&y(Z# zUYf5U8k~9E*(1AB^xA6y6?aAUEkjB?aL~j07^@;< zD&F4b*_n)3Ogj9qHwi|=-JxRp{<4jRCMHA zw;%kceU9NC5Sh)^^DtlnEO>jX@xro?o@)S#qzWOqKvUgZ&mj`~V`f)cU-E4a zT1@*o_)^Wrl#L03=Foujj%%ptk82=iKp+V(Z^ymn9 zO{CmB0ljaf!i>e$+rY^l36gx0aA80|=Uc+Ev}8&7!;lFe(#p zox>qF>n#L}5HopMvzjOfSFy4rv_@bvXb^Q3kxuSJZt1H2+zsSO-=!`g0vBse{tH)L z#H`obWjCUatJkk#Q%tn5ouI|6eaUuGbEd4{QJX2M{0w@O zs!20pjZPQwm+`g3t9v%eC=_?`;^7${2H&0jpQo*VQvXl~(mx^?EzzcRw*NfAGc*E^ zAKp8N1uyogx2#<*>7BQ2KYY!a#5BRFxgDg);mlBXPUP{iqPNPXzg>+VS)EC8M$GzH z5s}Vk(N2~rH7DsutEgBwA^_k4nDHDdVRGLb7q@0=Y+Li6uYP( z?IQlREKFJp$KHT!=%-Edz70aj*}Wo)I^Bx)Q+cLhmE|Rf*Ns(4Bs?preauFF(rQ?!$fow(`@Qm>cCp7oq{qT$$3j{aOTz9?7}vAo->ZYF1=0VSO297W z)SK@v-)n3sTXk%ZU4Ql|}-xu$c?CL!Dy# z3tgeUG}*oM(|y`e^L1Y)jms5{#OoL$kS4S*IRrYv zDwYuq1hOAJ6>9OOzk!5^Xfp{AJc%jwx99HoOdYc~5!I8O#+%21IUL<59*15?*URfZ z)cn_;J3qiukH@@^44y}A>6r=w&c^Yv5f}L1y(Fa!~ 
zQWxDJaUTRLMQG3Z>6bw6mWYrs1bj{YVf8h@O{)3$J<%AmOXGqsTV{pv`DY{!<rz!x9a2Hu2hPO13e-nTC&n2l5LA8*bmBJS3j+-6`Nh1{MK^MQm+?Ks z9R&vq$x@Cl`fS!)&hQNYk*dmvJ-KKIu@?jXE-9%EavnET`Vn-7-VX^QflU5>h4-zc#i}(3$Rkvm`00`$@9|29%s|f4;JgLHJ((9DPZVr0zYh za#H$*`dZ|bP)%ag?uorCr{SG&N^8V`lBgX#S-2-Rd#I~t0Weg=)(DI4e?E`@3;Un* z?!Qlm_j2{U;~?STToK>E^uSt7RUxL-+TgIcKX}=N*vze|Pk~d1uJBFAF1v$yO65sNU0`etY`biB5!u^yvaPiwX85i7X za#=RJQjNLiRX9JrO58rvT`{?f-4!m~jn1=Y4cA)gH-NPrau280+n!PNfT(-`pNLvE zb#zuUs#V2c*o=MfQW;c93)@~Aq9gMjHEdf5~y>7wtDDP;h}b{%(dR0^e;vhh0^Gj9$Wx ztNix}Y=gh%C?=_A0;5_Ay{Q&MS&6bDN|b{74WQf4m_pwd>PNKmcRQHh+2hyfQ6btV z;RXgsM>~wipAF)RS0Kkr0fZS253e&EIrW11;d56Ip@}t9Rq`gZ} z?|BoU=`Wezta4SWbletjowWK3jkcSCCPCj-wAEFUF9|~oj{+~>)4kJPrPNF@%dY`r zr(17*N%kf48rb?m5`f_)x22ks91*qu-t5mwC(Zl&&v#$QTu5iJJ74e|l71bf>)vTU zTD~(-I2lMuzjmEh1SWf*_Py*G+^gfd`=~9JL-ES`Kv{*oj`I z5&wC928;%83h7`&5txWe2N0rof*5G^Eso1p^{+#@5GP*1vgz? zq@IRdxAVr&=sOaUMTo$I&E9QaXjB}FPA#<|KcW@Oq+VRmj!GP%{(vdJvtq80sfHb` z+zYo*?XuAXOsFs;nhFq+z+1-e!OD7rkZKu2+(hM(5w)&w2{1d=T4XKEOua!vV}x;) z&dG!38N!0EYp1TZvxP*tHLQJo8#4WUmfkS7w;Y9)>UVyMR$&1cAFy{UK!E?1?x8_e z$X-zk8>>Zbv@?mCVOHeVHqxYZjk#U4Ltf?@g}_uBauTbNt+F!H1eLPyJe;)`3Lqj9 zUS$PA=y|A>SzVA0O0T;7?(z+AfpP^Jx`ES*q1r7gqn}|#^t3)PA={r=RLk80W&D|G z0+A2dPFbqs@+A7u-Ls!zEyGU$w&4aiBO@s>l# zvPig963RKO2gR^2%zM}uY+PqcKd@;&cm)LObro^ahUMMu-w4T>lOgo5B z2Aio0B)!wq$Et74DRC|K1A$3^=Sk!{YG2+fyD3S%gvf#)iB_e(^L{t zFL-e3RhGviJ-+zgi1gd8K~yd3{L5CHsOYN)WVBZeHFbW$hO3m~tULR#aZv=D^empG3d#4+^%`0Mxe^13^?8UomNi}eG|77}iugB7vKHe%Je)@@B z`Zr;wa)D$q($mHP9t}uzk@!mkaZ{V$bqNWsI0Ln^U%XGq*Gu{ecm=Svm}ad?IVIin zI7hqFlU)hW?ezK?1ky(&VkZ%Wf*!_%9kb$ubPicDpO^HaRQOfkRU3@8K3b%gZT#p* z9aNEG-}}}=W_s6#nWRpp>xexA7I_;%(M4-NO@A*tP|R0z^x|7V%Yd%e7zm-sDo|6U zz*g$@1AhHT19mc&RivClPsC{IThFKA_CqYD3EhhiQQc4!%Zf%t0;^YmTC3%!g@wYd zOf)~+)bkYMx6&mdy#B?L_W}6dtTT=RHup*RM98G#HJ*GTf=iTLi4R2kke*p0* zf2`E$*C0WO4IvPXmqZ8UBe*d(zAKce6_AmOzpuI@(4RSayRetk=*?DsRUt_ky>FmE z)Jc!hqv8>=H!1*Z)otdfQa48f)&3I>1t_3J3b7L4xdPGRs~W*ct$)U_FW2%!>_Giz 
zDn2Wz&O*=mUIB0q#K8Z`>v_NSg}z1;E_gnp6>z5uJn!gb^HM%_+hsyAKFIG#eo3S0 zOHo>oR_s&ld>NtW$8^O}WKul(Blm613?y7n1DAYizv2I5>a4=zTC`=2yStMF4em4^ zJi!U>?(VLQ1t(~5NpOO@TjTETH14jq*V<>Fdw7^neEkHY$E;EH*Fm0!1_U|@T}j7Q zmAI4I#W9EOUQHMT#kL=7zV2|eF_-^C*%|d&(6;J!>5spz2974iUHEs8rYu@bH0Lx; z%`xkFR-y1E7I#0GA-M9f7KF6LpREwHi_YyTHzXDb(+3yH9jSewNu;{VU>`D8ZZSY& z#CvGST}lJ6oxgelDbxl5O@l$S4HL%PUFMwCZs?=5%uG_ubu}bh>igpQ1{hj)#KTuA zo|kVwS4}ZOqg-&5)#D>ex5;Lc_p5KYlJE-)0IbB4wv$PLXJ)&QW(l%xE8DJ7na$hi zT9t8^BDOh_!kd{TTecKy>>Ly9v27*}B$6VKV-u4kTFv+IZ-0$35|%h1pkr#iFQV=^ zq_cmPhaYW)m@jd*O$wY??qKG9W&Zw#UC7FPlg4h|gTlxG^pk^1E~ieiw+*qlA?+uS zw_*Az8_c)EC=9mPNMN>o_sfd$4>))4t-_aXZL0auJN9}txZ~WqZ!1-I3|3^|2?$?-i{7@}I<@D4*P&$q(Ug0O9 zJ9?an6Y=Rs;NVz<=r<#jNZILlgZPlT-3`abo{!Z1IbX1URn)M2q&fpkvyh7~_ZKwz zzSz7}*{}6(N?~ou|5P`ZE^$@M36)Q}-(&K^FVp08#=E zqwUS{Gy*3P^H^UPPEEPor+%hF3Blo%dlR{Lg$R1GPZSuZ%Ag#$OY52cy1>8IsY&ndQs+cLAyeO0)`$6B$U0piIqY=5JwZg3fDT< zh2UCh!gQPQq-a$lf26%=%dZb!gaCZz7LhPtjYni9uiI_A>S}k&Z7=G2a%}=f%tCX% z2Vy0?Wx?D_S^a*jELhAypnN$L{+j}qa@hPcYH94R)fP7y{b{|j4rWx)YA?ebM{Y<$ z=u&xDc2UY-pnCt4)m%^alia38rB#TO*1)(*S8+n^1P4Zx{0ZluV%o;4k@2%2W;?0i zF6%U{MZ1$wka5T_a!B_Ny0&|^K6JYRMtx~^d(xy6$$Cn8X*_1MT2d^3oT1Yv#DP84 zG6)}4h3=mcqnNor#nc{79dLvq5GOMMangAD>JZ+^%S$;NMl|PAby7AzCk@7r zRO5@sJWis>ig&h_0Jk;q~@M5m)sVvIV@M3yR_D4=qYQGo2z`@h}1lQADidP`DR z_;fzgTtSw?0K6?yOyj9=3&)RTB>H#ux97#QcRx!x1^&rvUkd|X6}yd5oQb9FUx^{b zpgVs}WnP8623R%U<(UvCe}vw~TQNI&FFq$h_5c5g$XoXy$MLLKB}WC{ZIoV(itww# z@@=)dBUpLNxIj9%U$s>e{PRO~%IR$Nq<}9QpI{Z=8X1AlszQ^VJ4QG&0WM})D2HVF zmeI=4Y#%rn@(m2%hgTP~|HFzH4ur$BPx~njp8~Tv@YGjf*+^Nmi~(3)4OO4~D}HXb z%iJ<#!z`i3*n9Ezz|%c@!!9(=&Lf?Bm1vKxY|#QDSj(#tE?VsT)eleJ|3#`bn5&|i zUvq$SR61j5vVy$bVk7EBE2zfZi6gXMN4gJ&U7`7>TEZ)coct4JTO zK|nBO!`Q&MT0!-BK|Msnl6k588o?KQvdekG_b0!F3zvHio__q>7wu8juL6oCg^E;+ zr-I+7^RZJMVZh@rAqE(ypa~z3Px~kL^rIaiZArCX1)r&=P2xUWPqX(y!lEJ_jd(e2 z!tfhs+L1!XbX#M8usy-`KOQWPb$#1i_&R)Tka@-^L(FhnOva)+7TG+I{)uo(LJk+1 z35tf=i?0v{4Q>=hn?3uVY>JIWW~tjINRDd#htul{WFRV}u@%GWIE>`~gP|j=l@^JC 
z;OzSA65@0Xeekj=$D4`_MuQxa5NSIs|#NX=i7mlJYD!y2ln5O^d?-t z;c)qO5L_v$=lL&?soIhOdo&O~T09z>c7o;`$G_}7Gb%qm@?<_<2r;cbF(ZN!jyroB zNh$-;mr z_NjOE5U0!>r8$uH*>{gDa?*t6%c&XgPaOQ*<6c!-&ji9}p08Ieox-Vgaj+)KiiVoj zal(mZ^F0U(rHXPZC3-SKcGI)nrpCa`F{r4P6vCULWpp?{4PNA z71Yor#o1LO8F(V*tBS*<^pT^!jm^kr+@c&L*?vix>&jKT!kb2j35K^BC=MDE%U$hx zasaMa0kiyQBW$OuU6!V;=ey0Pd+q0I?aGxK?pmKUf zV!(7I&9{8ugw}#M20vzcVpv^HwS_rJ844xZVHOq^*3GG&0~nJ79nm=GzZ>&0jmzad z;Sd5c$G>%s&QG|6yXZ>^8k?{fEg^PZBvEw_9duW|hMXnxY&Tz4=G_Nl{!K6L;6dV; zSaKsGG|5j$?0bqQ>plC3A+`0NAZeb2W6f{Bv`!QO|%|j&YYw3o`g$KwciHJ{ex^ z#RGJaq70|J%Z{k!D0or}uw=owVsQoI?eVoK-6+@b!Kmulu+E2%siAfC!gKK+qY_m( z4i;ifCv`dD;e<4-$^c589>GFcetOQc3b;2 z8OlLnM`%eYy5G=4McExb%?r6VV-W6zA7v^r~Ys$7*-qa`MuEf3VoMXxg2zw#4-+@G57yTfpO;xmS=y>Dkb2wad?3jy>C4JjVXfF$f;6@hVL3yvG-ROeAa0DPs7#RD!kNc; zX9^?~VFo~5g9Kw~dMQDOGu|I#ZksPhZsP+QU~2BcdG5~=s`ex?kqdOW&R`hTQ>~zM z#f7}BvWnujefiAST|K5{TId~Ro6hbdeA3I?6Em=8I3?K`SUfW@~60yl}pAW;|pB`TI*j`mzn-^=(@#kp=cL zcSEF3f+|u~sB{F~4vo8+YK`Dt?fHfbNPkXtN_uSjQ`)8z7ZG>0=&xyR6{R&}WRzMz zINc#=Ije#-+i&Cg!JxXh#?N{=^A%Dsziq$Rl$bCDSz=hmI0NjYc#4OOK89Bh^enqq z|M?|E$cfkr7cGARc&bm`Wx-hItyi=+a#t)jpun6fMQVzAbjq9fh;{N{?%gn};jDm* zRPKqE_P%Sz@m3=rMc|>gA@Sn3f&p23LB3&V zfh7F(nc+)vpUdQ5;KTf1rHpDxmR?JZW|hax4?V+U$a`a{peoExi%A%AP4f&z4=X;K z&h^^{j!J%0z=?vx+XEG3qNn3a5<}h-;D4nmrQy@faTmUlF+f7po7~Z;H9wUyQ^THy zKh3J))j;#YJ`$(KXHZ17xi-kPMr7i_fWQFyafKhE!YkQ{B*2?KOH*oTNMRt^Cpv5{ zh+ZeU?dlBa0%I4*`M~jJ6jtk|ep%-PW|B7Zgv`j=&qx$1cQWZ2`=E~^Hc6>$ zXrcX(f_hx0JH$ZtKe>gE_Va}UcA-nfAqskkW#9Ujd3(Xt-K91XlKxCMtgj&>PS48P zDG&`<8zQ`pF#yP<9<&V7Kexf@o?h_FSCQg7)|j-hl-k)Ep5jjO^maF_xS>~(nzGs0 zOlc;>Zb#1)(??61$Rpfb2`@6j6F*T zs>Knr1W`*t)blysO}Y&9M;uO%Cls0dZTMb8(#k2`C{6>y68=x$7Dpl1ni$8L>!Y$6 zWqmT(GRg7mP~h!GByBi~usDt2GR{K`_eJq=?nty$ZxCjluM78Rf|29;2Kw8I{a)<$ zzK%m^r;Y~T`98>gu8jWmNg(s#(!%!n;)8jI8x|GsnK@CD zAdb4(t)>6k8nQg?iwCBrI_y!-e0)SDd)*Q`dA~v@dw6ln6n=o_zzvYpf8zTMB9pK_ zG1~o|2{~-*XFlD@`@YZn`nFgxbolrn3$F;G`tXc)?e}9Zc32B`c)fG`R+(X>$e#v< zDz8;L=>vGiDTli+U#1jIh9IY8_j#!DoF&TR!dp=Ol%lcNd#oVyZC?;M)?6ofK0*VS 
zJ;<-n8?^IOKhOj@&TwzD%TYd3)1v#N^^4;$Q7k4G6Y zv)GCK8|l@CX3H0I(%B9Bu&Zm0kMEA`Bp$31JBYGV4@>-9{_XhnIhEpb17%9DQz&@n z`jLqO3ntzB6HE6`ei53lH0I-4VROCN-P`NrY$ej9(Vmy?XkKUP5<*WFC$Hpe1pjDV zZ=a`DH7y~Xe31U4Ra-7<$XmL*J|&+k?Bt?&*Mqg?sA%=QOvQE~f#xVnSGS|#&_W4f zcawHE*NA^5oZX|>0`f8hPW#Vs zGA>!K%BdIcyvc8JwkyfGSu&gJg&cM2C7&NE3({X4b?ROSwe1DaOOwtDB3mO4&AQB$ ztDqG}=7k&unAwQUPPCM>M2{$KGX9F6R&8=8w7WNvAo;mDYm6hj%2=A7NCw~LwE%F{LWzs%zlwkJ$6<)#@O(Oak8AG? ztleq4dfQr)-;Uo&WB)-u;EqG*4!Kd7=`cn8`t^(EpNEwc3A=3X=&0}csIK|`$JaOT zY*zIi^|S5W_1U#BX;GYC;j_AihDM`QY0K^D2H)P}5dA9&7NQVR{0^MpzT*s^A`Q~>L786rt3v4lEYF9*6ZF+RL|B|$Cs7P z*Oi60-kdiw*0(#8wizdCVTyJHJ%X&)7Gbbln?El>`cK~j*&5|^1;2@CLcZuf@v}d_ z(f%@h=bx9rtsZr*N2*=rmwG7}$8udPJQn*ra%3DfvB7Nu-!Mb^#q}?l%H2Bd{mXrJ zf56_Wx&bJ-AfX9MEh1BfIhv=@^D+UP@$aYo_p#L3@I%eusaw_=T|Te~+7T{z{F%%< zU0kUaRde+MDW!`lp)Ad)y8R7%4uS|3=k&sTxs4IU2i7b)#d&c=}l=AR;Ts+9Y+l6yb+ z@p$sV7fi6=O|H7Phl>lhZZ;Gvs}My6FGkVJwjz5eQ!|zNK13(O5c!;NJEqB4m3>hu zFeuOA?7Le`b|i`1tXH2tiM3CA})`odxz6>%Atxc+!u2lHI$mibT51qxt-v2 zP89k5Z?{#lijM-o)>;*RKvX{gE@wbuVqQKj=B(dAinA>K)Xj2UJ@+#sMF((Yi)ae- zOym^J$lAE7vm zD^=bT<1+|?iV?w1_jkzDeH&kl6kts8y&&E>DFtV^>HNP`+tWt6i;ZYt45r=~mldq{ z3qj{;%E@Mr1Y*Llq+%?IHKAAmLXXX-YHmQaKh7N!d6-Ft#pc7l)?&3Q*M8i2guJZu z*2TP+CSNTtE&2o1Z6$dF6HzQT=ab?3AWz!ry`CKDjc|(m8;;ICCJ3bU%2u)AL$rRW zmayWfx?o@^k*dMgHd0pShu$mD876v~Bi%WXBX&NK)4w_1*>iE$**%!u*`FpH&=qm! zVEh%V!5)2d*=*v|7W=I|AVyMrec-Jm%h6^@$@DZ6wg1-`TliVX;|p!t_}`$d8|?$d z^02!Hnc}z#iM71`;zf4+P*SOU!`m;W6$f#QjD^O-HLo}AZ!j&afM8)N+PwFoPO%Nl z9I*|>9Lf8*fY|Xmk=?bN54)k~0kF{qo(SJK2YOkcO@^qS2VxM#p`(W+0oGTQqD~`V zs9xhM5Uo1lJ2%^#)VI|m7(B=jz#@pcm@()z?$|bXHXu!{4^}~F3qT}t(b?4Z51n;~*rHsoRm?O49Xw<^1ZtG? 
zCSII@I-Rei6CZYQU>d#tqJcuEP86pZl+%90|8~0Z{;IS2;G_6LG|_P>{jwh=`fxq@ z=RoirV1C*GoL&`c!ske1fPl?D@N;Hof0VbUHe~u``t#czs|i{(=kQ!w+jA#kG+&$Z zdwpv4ldfP5BbPDzcB`e}d->E-)vGguccm{=%|Jl!BYfqhz}cphGoPvyqM(2JvmfGC z3*>_!!~1_8cX1h$=keDZWC0?ZscMhUS|%!NX!V>}uIm~+x6T;iV^xcrh;q9a z{F^qmQn0;~`%(!#RS63zq{qqz=iwro;^M%Zzf&;L8L&4?u#~Wx>T4)@Se4Jy{Sw7i z%l^z{+uTm;HHbl%RQhzZV6O1nTYXPImISfMzY(~IvA!Z5_^wd4tp zIzv++TZvUcTreEzF-jAz9t&W<~(k#bbWtH@=lf0C6XNZ z6*TD1A61st?{nbg^ko*i%SCwF?6XJOBDgV4}pcFyq`gi2kt~YcRj9FyrX$KMlDs?Pct@od@X2^e%BvX z*vV;*rKwz$F{$MofPE#DR9>RIx_SbXGwS4$3p9imdu>z)T$V7)uZQ@qUhm&-3^iY- z5-soF6xlbkC?SKl^PZBe^M$O2KrVD9_fMz5MjBi+P2$PBVVf%G*sNlUD`fbA-9mHJ zD~ePn&L!Ab41fEJ;Ss~DL`hUG5^w4&}%?=TDphYzGE*){0E_XBGNhD2mO8w%9zl5QEek2RJ#5`;gx%}>_QYXaOb!9Tqu zRYboRBgLB;|>^;V(niiDOXCX}KDywUyQJ^Vj;j?~V1%Dp5z-PD9 zG8@$i>&R>cuu_%M(6Rf#C36zRiaBAFMd2!?jUAaF3@aOiSB&q)&xo0k#Riwd?Or%J zV~A$S4@{Q`a?;@JNfOhK$&0z^qo5^+0=&2hUM;6D7Eq@%dmn&7s263Ot^+ESZ9m>0 z)-gNaW8YtWHhL!v{7^gJe)EDR^rC`)O8|DwlqUVIGEDi1aoAGiI3VB28l3uRo2zYP z>Q9ve$?v0l4o;oi8dVpT)Sqh196de#r=~YO20hpN9eJz227PNidt0-(jPrCLsZGs{ z4$`&3zg{}uEcp4)M$)0^` zoQ3tKH2lyAA6(T`{c9md3k}azztSvsMC?=N20Fq0MY;=TGWV3Qv}K)+8_bFD6A6yU zSsTSV1BtcM(3;|vg zJ2>_lmx`&aldD?jhUYP15O^{8rdV_Fb6w~h8_w^F7RZGr231E~9Et*Ef!;pM={kn& zB8do<$nI+VNs=0Qe`5pHR6>rDNC%smh--dhr7@Toe*fL2nTl5X3063jfQ&(hYD5B} zCl&AV6?;r`kC`D6MNa;v2Nm>vr?ZruAw=? 
z@B6acWF3!Ok<=;Lf3Rm%=Cb+)oK(Mj@~&utv?o>?c6SRbdc9#NgzJH_o)?^+hpjT# z8*##UCqSU)S9XU!gDS&W3r&##B#iHn|6o}sMq3f%^Bi$LA(P~Mv0diaHa`-&JLc}g zUd9A0KXS7_&4VM~B#d{>sHOEK^_>Af&q6j|vikw;9?)mH>%hJP*{{(p5huS_2ER1} zpN#|@ZAU!Gv_()X8YVznYCF|FLu_R00C+@Rx&9YrJ9d{o3r+5T4+L^2!OB7#eNDm+ zCQGwtnnLZE`)JQ_GYBupiQcamxIUf0L*RP^yopG`=`+z9Gg1_vB74~nL=R62*cG9e z?$hjnV%C5l>?{Ki76a+(mRa=49WCw6qY&uMSBL(mw8uNXl)E3kiO&lTZfj?lnU{-D^=YF;3t(gi-UYnDxAW7;?BE`T{+y4$?I2c2VB(7=jb|j*G_nG&OK4$0Iv$@#pj?`Y}VB2>jkF(?= zu&Z`?W@ZU=5;}UNfR28p7{?|HAinVieqQ`ngjSU5pLs_OYXHdYZyx8m{mJ4q6pGka6E|#THUxJ zF5x~r*tW5RRoCCFTAF`+8a$nD=W*1u#Y}%;;i~z*C<|oGnwStT&L#Dl-2VpM`Af=; z*gncRY@a2aNb|5G+HvuwO8ZvqbVOz&JyLKs2`pOnv)i5*8LIrdI>eC`vD(Lzfz4l! zMxqXNtR^NjieiE=P!8CioF$7)530^OS}tZn%9eMWp5LjOc0ydOtIq&Vrp*n&Fw}B> z<*|~`JX#+1bsjI!jn+QG72jTtgjr?{EC=9noAJdbB8oc4UerL!C`_w90e%V+DYSeL zfc7G0XEv+I1vE{J=apP@dpx&S$`POW^d_R0zzrvp^X zhaWjq3Y7HIVoQTniAv6&B0t&k-+l@C^ym{ceL@b-=?WJo*jmWB9dL1Yf!+w%9@sz_ zc)z54m?L#Ce!0k7?DS$j)8`AHeRv*}i3^t;Hu|46*sP6Sxsc81a40PRu*w16&hnv$ zY5dM$Sk){^x*=(jW?$pu$WjXRx}f{mI|ZF(tKU22!*}su=}-T^%kMvH@aqUk zfZO(LG+IbIy68P`L%H8f^}L(2KI#252AI|Tr@-LpuKJTq_potT|NNIK1qeJ-^9Ms6 zpon1ib|r8EUWBhmd3P|LEI7SDhQFNbRhddcnoRF|aaabQDW?MvV~hEMsBM1CX1)7m zAhU9j&afJtCH2-}^$@KPb@w2SCrZ5>zRV1tUM5oP=v8)5p$Xng8HMago9n@LwlHIr z>S=m$Gh`+YD3=n~whF)_q$$W3#P#A+9{k!Ia}>$dl*QlDfKl4ws=B8i+HwGiGUXm!u7KVb`c35?GPhY}r zOD!#lukIadZpf%=-n5HeHZY%`gjODdY}vQTj{BndAe*5i;LwPz6lKy z($|)iLY^W{nzAiiTJgP&M)6Wn>3w;G&F=;=3H=awQMqq?ex&wby`wL;NG7(y*9qM( zJ|Av0pG~?*OcG0~{5c{vrW?4D7m;gme*q#6A=8a2AT>9BEnnB<$)lPdBZ*)hq22D9 ze*`pGAlvKnM?{CT(lZW!F?UBZ&S%+CUlh$0s3TYq$Na#33C%HGfc^LZ4zrR{Qs%_1 z+C9sY6^^cPto!nm(~-)Q^lS|f(z<}T^;Tix>3L1hLa7tfJ`!#?(nhh^=!@s>%MF)7 zfYz5?k6RrhJVEkAC`W$t(LByRszh8Ebn(~0P;6tvXzSq)vf!eQlH3t0TinHqn2cY< z!LeoY-M~hlzY>JiiZzX}vNZ_-x51XrFbEBP+7A~BtSFYFN?xw_yzC7N0z@0fjYKo2 z=nMfd!}`4;`kg(Q&<;kY#rN1XxtP=Lqf)m++>@DWNMl3wO62+od)=W*5NP4vcuDaR z7Xm)xbAhK5=I;1;^bOx>Jm6J@THzJu(8VX;v6mW;LKk@4>S26Ux+7{R_>R;;#)E9& 
ze}~ZGf5XN9dT8#Lhm-!4==R0gqQro{CcLq}U-nSNo)Wp57bm+uzrA7lQ)F86v< zTRLx)>+d3@jx@tuE_+9PHwU99MkF=WueuZmLC^cOviAbziEBpSPVl3cYBB^^B^HTlm>p8~2yzZj*?(B@WVG@wI^*j03A z_Hzvx#*gL*-Hf(VSg@&b!}AQ<#riyY1@x&$W^|L(^txmgbF~eh5bci| zYnq@8PCH}H-n;VsQ!B?KR{I@1h{6ar-jEbucESHl-M4ek+r8Au*OAyX5fA?HY+;nF z*o`KilY51KSJayGy318*JNNC2EZZABzsK`fx;TMw4T{EB9+rLraXV*}{u7V6hxJ`; zQl1d%AJ1<-6FZ&GZ#^P*8-9I+^HZ7XBnCA9yXjIVo)}CK;qfMNlDW@|1kYhE4(ioS zn2B*o4usAuAjGyETA_&i84QF3dcyTuk&*e&^n*<>vUpo`$iN}Q`T%Hg{*LIpUEs115IW~Qxio_jgZW<3cvDAldpb%41!m{{0h7qU8^Ov2 z)GOL~9|!5Vfrt<6CR`)qu)8}|46ZJntt= zcOd$Qxn34X&uGbKiwLGQzdczfHEO|#NmFfvCz`nhGMIQo^)%(d6LA{u-!(a)tEOHv z%zTgl&pxkmrA_Cq63I1Cq8;lh0`c`yuZ1rRhdzq-z1A%*bfqnWt0-A;eR=TRLVG2i zCs#Ma26~JjvBd)C))EX_x6he%8(5B$ZcU1!j;E%#m6DPZDRkIq4Zun5F3XnlLMV9@ z62V!*bX7nJtMIsxQ9-vR>lLNTo0&SUkFGj{M`Xo_ef|#DM@LPg+;4$W%J8)c@VMdO z>T_RcSaHp?lgaAULW@gS@V+dSYly2SplWIHxVo;||AAgB#~oN@fkYCSFQ%?h9@-t3 zJgYQ%^EG-t2G%6gQmF$haf};@`&O*I^_WlG3!sKz@G@rW?1t>hE5zl!FlOEMl;)C< z4RzHrCC%V>AC-n=LSdTo*~s_cA!ThtqHb0!!&fs(H);84Mr`3l=Ji#BvU1s3II)?K zQ)(c9w?NHH&a9k%UvOY)$atg+z{-gf&4B4@t+y`{FihhM7P7ck8sxLE-3t<`NctnLh$T`#=Se?Ytk%<6-93p z&W2u@gQY}=TpaN;<$u(NfTx~Lb}nQTsdd$4*v=eZ`^X_f-{(5UD*f-{c(mD z!odlvQqQ^smY<4bDn66&z0|d0SIr@QtR6?T9C8!>qL@QWz-el~?Y7-H&d!w7r5YqI z&FiUr?$!lv=sq{W8tqy{?A(A(oq>uQp#DyVqPg02NWgr-?~2q^vN};}5LU_z`I|Cz zb214$cFJ(~%;LemO$ZfcX$ey)j{58B|$5BkW$#l;2To1{2*`gd?~iB7VKzv?lijS2m{nG_%dPk z@jzkS!mM~urET|sJhKBo261ZYs|*Czt555EVfRc0Atx7($^R)y@bqJfCJ? 
zZ1%*PAAsx-Fkxw^BnFN#N*k_}Dg#>F_!Z96 zfPN#=hS!muR)!|DnF3=vQpJ8Q-4!R^%EHKE( z$$hsF>5jjfF!lte9%dRC%f<1sT|xBUww+p{g?LwPn<-535}B8D$-o@ zT(J;+=Jj>%+{%)Loon-j0vRz~qK-=p@)5 zY2dyCJ^B=){GAxgc|z?r{^__gtY&kH&b$O00FAd?bJdJS`J(vH+-&Hrp`&!V7#V-O zvoM)g4ZqzczcE^%!e}Bied4MMEEK&M{c)`&dW`EJJ7q`{$lj~gf;7WJq&)a;(JJ?kXfiD#g^$wIaejqX zWw3HVuFbI#ughN}pb}x(aj6m=V`y6q6bH!z`Z9mQLQ^TVc%XKzK5`LooYyM!Q(AH_ z1)=B~(va&K|i$BwTqO*L0F%e0-(PBk*Jd)~3MkO^Ns;QZY(I zF8Kk;ZFn+T?4?n)9~nnNGih1e=K3dR)x`7@!hfz(Uzw^-=IW})hZ(02;_XrAu?dJ4 zNAj(5f$1>zsbeJ+tl?@xNC2N8J)6Is)rnMu5J2!tXn~1V86j=uO}Xg+#_R?s7(7nF zeLoE4vH0c>bfTX2GSmz%xJJqN$PjbP$VhjR-NKtrS6bFFrj`#f={;F*n3^nGRRC+- z^_{~-`?=paWsBPO|yq!!o6i6+gDjZBr4rT0Y!)Ch^(@CuH zj_sgkVxY)?+Z_LYw`RGT0CZf&$s6%qDtoxh+rXiBByR^SA|1<|Yuv`3ZS}7w)D~*& z;Lqloqq$A^1f&OKj9EJcE}rdz#SHfFpcg;jg4dx01~YS?q^RMEE76d=v0uhQp;os| zqt&J{;AF@07lq?j%SL(hR(*sVy>Nt_tZ@(Vkdl2ji?Dv+g9B^N`MMaYjbTZ|X4C)-px~{=YPT*cJmv>~oK= z_-l#7T2PXH3rZ;5nC(*QihJwQXSpD{nN=4v$4Q!5LD9ys>Yu8b(srSGV!Trt(7xi$5r1;mqWEsFkSm633erQ?wB>q?=D*!pfTims(kju1m@Q0mnQzKMN9m z5yZ^yh7XQLNZGr?eBU|W28FgaIXg3spXvv%MTHx^M>;Lm+cK$hFlA2?8_1u8wA&PI z*)L+7TJ(QNd^w9rL=x&i)=)U_2^BwfWONk~8X+B&?+71l%J|w$8F|1c?I>c(YoE0o z#wkF6LgeQ>0y%?qV387FfNBo)s;#D!$kw@FrFj(xvdyTfOs24GvqiAV3O#xwKkKpE zZBnw=^)eFO^hXiRzTw|Izwq6>?G_!}FHNvloldYf^|}x(U&3RndmEoOvbzk16D=Iu zK}R6g1;jS68XC5yvgNL!&W@K6Ej^|Zfvk5LCUOHZa`MlMM0&a=0;J3`Cmz(IPw!-- zU$4yMPA^gBF#-q)_x*m%o+f?}XoXp>ikZ~cgx{O;Dqi74kJOvVTN0rx++j2nYNsf~ zEz1+Sz)x!3|I%PX!GOUe)_*xy45-HWgSby+w+2}KwSp)u)!3~e6cE=fw34?ni*5kB z!$L({pwrqoSvDqn@X|eF`TjOBwbyIGwwpzVeKm^NfdC34%oe>Gb^3Qy&v;@%Mzv)d zj*dHv4fp!HH&Ov6R|B$z(hIW#bpJ-z_J`R&0quwHAK2_~KQNguej5PxUIc9WzMRh) zVXcKAAhh55LpuY%G<+)#T~i>D8`5K`{kqNC#X=4y8{#URJY?2^j^#^b53k%0N;-Bt zGxSjUr6fJ@g2JVm_df2c%3XEQY zN^r0F^QH%Bi62E1kZlpn)BC$5M2ilXliVl;UyeY+2G3?5A#bPjPR_$DsH(B6PJs36 zNzRVJq4|@L{UE#TR&WMSI}J3c79~5&U&kkc+-h}wy+~{!DnrjFg&n{qA06@ z3@4*k#zojotiR*V)_Z0=UAEIYA*(Wzvt z;=|&Q(n#!#xQiKI83w>2N$D0UnZn(zND-qAai5!>qH)bdt%Db^lwQW zRA3E>+a6l9Z0_#l3=M?1qMs!CN}vCxDkcTNA>b(gObw059Rx2+)15! 
zSG%qBv-biZMIWq_#~xlr5Z`2$kpYpI`CeRMA`jYLP9%0Y@W!Lj`59BJru!b_Q4ckP zkW|RU7Zks+C##uGQ~&-%DeTSTj{vHti3gh*fntx}nGHwX5X(Cv=A<1ya9lyx{G|Bw zVo%gyrya8;j=24Lzuq4g@KONTu@QzS{U1K=kNyJ6rm~3ACQhB_|Hm4!A-Urvm79m{ z<}Gkmq+zr%CRg4WAHfaAt=jvJ!V^ksvKqT$EN5ic_M*zx5Jn9U*pBU7K}SRUj6aCh+j;RS3H8jR9I+? zb{9f8Tfh0p{#hl$`qv9T(C@$WnU{{_rX}}ztK<0q!OJBbWqHIMh5G|Z{U-tFQN(9Y zU0cO`bZm_7b_Co^=5B0UZe8ecYe*T&-~I-!-VQa-AXJ?Cu$jWVgU)$uLa8Jp`|3;koTeDy-6Z9rDN-lWy6R?8?rx|f0XYc72~Oi z5%mS5KU2lp0UygMqKr_>t8! z@F;U-Fp@v!w-2IHwi|hAq~;=;8Vu3OX#L<&OHk5ptR%va0xmG^a8yrT`AF{bUrF4u zC@i(+-C}k} zJ?>()t_;@89hf7P`d@Y?0>Y#fU!daruc){>-bj268i0FcOZ0qol)U4i#w$alY0Y3- zgvwT?e0L|LBma>JquEju&8AZ<41=b?mHWR`FK;w`(gxzbCvFl_x)Oc8SbbCAgXfP; zj+WdC_BnA>q=nmE>6d^ue})ETk}%0cexvm!cTLVBk-u5%F|!Xo5izYlbQ%?Ry2n%^ zI{YZ^Stz?hd&{ugj=4}c@|Ay*hetQ*MC|Qo#BFc&w>Novl4De6g{?7ywt9l*kp4`N z?ql|yU{gY_eD;eCuGH@F4tfA29RiHL!pGNYUl}?kl4oMt+c)&E`T%TThbQ!_NC1W3 zieRX}T{d2qhhwV+d(*26k(h8f#BcFP6JFx40<|q{mo3`K>K_ZO>hNX7f<4|>Gdz_& zf2K_J-3zU70{GyWkz+V%p=j*+Y{A|zGLklZ%FT4}%6}LAjCxi{64?X1R`&2`<}%92 zMc2c;E%~EF`3sXf*#?t2?CgPoXu>|^v#9{uM}a({MtnEAg@UU|j--=!6t7nBCzBD= zJ{HR0s*tS-&>)tQJK4g?-ZXC}=vA!m^OAp{}pPJ-GAAMLf`ojKPBF~70c zVDNX^^}|}>X01w40^WlI`+XarK%#quo zxhHN%mR)|@@E$?v0b3YtQft>^22kER4>6`tH>rsrd|Iu)?`brFu@w{|^b zknu-Jos}_tBHOKklIcpA?N-Jwa$0eN;$%SyrzfulcG;!%{`n@fuN(RW)81o-E3js4|X@r z$O8WbFuF(wui_sg>EINJUAS1r%jfdNtK|%XSP3ZNKUj=-;}c!Ie4e|xKl(VUkI27H z@G}fm=ecb zxSp-3#royN6_op|QVId)SY^$@@483mf>0)>sw4ZNGrRE)4?$Ch5Zs4m0Yn=Z$bgK;%4zw2oI?{}k;OcG$^GeH=wEf$Tb@+NB}7IWV^*rt z9=W{wRLblVvfS|m!4!*WTzOK<4MOCJz%rdqoqYhLH{r|23nrwm2&t#IVn*1esiy?t z$Hu;I6EnglxDw>AX`dC1TDomj=9sgV&`5pfsmFr0l~};N%^l~->5>s^roC6lO^bJV zJ$h~x0S_%*}Qj$jlacH)SS{-rXS3aE!2q* zRdJG(oa5OkmZ3QuR4^22!Wm9VQ%@(4%D#4yUKfZkfc~M`I{YYYEbuAh;&h;})^-y+ zTez0&quheoY{zT*@|$wPUqYt(M{mWD}{y_BAM>vH-vTR7n!!kN&uE z+%j2PcDIs#`+!b#-#a;V1BOnOU47fXOz*oNO-i1wdU2k*yGqj!x%j+)xj9R8^RJ=h zad~p%S%R4(2nkhVNU=w^K3K$_90PWs8;mUZ=8ft~tde+Tnz>@LG?zyM|FK-+A?GND zu})F-QoQew;9t?Fdv+yyW_{XQH5KWSwSSWE*b 
zGNj^5u8q{DyYKnga5!v*%3zEgvj!IO>Sm0E=r&5Qz2j?EeyR$|>a7D8{esg&Pt*DW ztd-bdb3&VHrFLi{g)w+O#Cu3)uI?5<213+_vcN8^>ND@d((3q$LJLge0y=Gon;Fkv zRQVJ4OtaaAScxtk=|2mOzzjOHN(X|T^haqzls}`u-!2r~2btM<>FbiS!;sxtVmxjf zHL%q}Vei>5)Cl_ho1n{g&&G)%9Zr6-yyzlg@$l9SaDG+rvLyFBimec2!|b&1AJ2G@ zkt?bih7c7-+8*al;QHgj%7*NTgkNIs>GlsCgpJbi;`)->cbRnkVD zNsvSbb8&~yu>pgC2?K|}eGZyv^L4p~20Edc^r5gnSPAYux4 zpZ)R~LkI-cX)k@<{XU10WY3JQu9w1yWC)z`I$Xpb@V_(aIr7IG$tJv36mw zE6+-$f+d|1@B%2=dU}RR2!A@LFkxo=#a&w1jJ|YXEJlsZuk` zX+}Zy@{Y8A85%vsj``~*3dWL4{OR4+dR*08syoI0g^<&>gESn5c5_SQ|NdHEEomx>i=711Rl9&r9S(rWU4IU~ z5zX|tuE%PdJJRpIQ@f79h|;BRlS`toCbZ)>5lK7-TXV*Uk>(I8NP_jR%IB|rNvXuZ z{Y8V8fEOYPStD5$i4TggGwMHbqO%)q@KUWMs(%(J8ON^naTk@yQqzBi8(DU%BN(&a z?=4YS|3Z4S$yK42Od1;7ug2hhCSimP@KnIlv~p!o?G7z(G*H8n{S`Sf_Nds#j;l06 zZ7G){{(M3XQE3aw?r$ia=t9V9!&CMUyUvGX;MZ=9fIvB%xj!w zWv_!uD`+;jQ~K00!?wPx6bQGVVKR41TBP0D1U&e1zwM)Pzim8~24B*|ayb$u8kSZ(!qcjuhlvoV4(zo?dhgp(X!W?hztlY z20J)SOg9^i`F%Y;pRToqTn+JULv9~hTs+7>P}flNv7$}$scuOu27Qt>mUTPkk^UzZ z&-{&#>KcN!3RsFkPQ#))KDTqoU7@V7zU`@w*KkQ$UgR&~`+XaXzWvk@%80Pud%PLk z*H@RfRpIJb>oOo_CZ1r3nl=rbVkDlRJwdKODVBi8$9>})2xk~Z}Qq@Ui!5|bRl%9DUr z=M^3`#;?N<8m+vNh=^KSUCi#EziRA1ra?JR!D%kL^Lc#_z@KD;?82awC0x@5+xxtj z8fEywAA!&~!cGd4d$iZj6z_W^S;vM9Mu=ub0U(5(4$8`_@!+Z^pg12&B#_XzBRiBW z%6L!PAn`@yoBa|Yg7}odiuK~?KeRaFL8LoU9F%)$A3LNjxI}bWPgXmt;@TCZ^BJoB zB36ViI9+WrIiwCkD3LFsgzz__o2x1aO|w)X9@v+c2Bu^bnaFmDT3Ct7=y{{^f%*0` z0vK^e2mrwa+C%j36R=Ot>3jlBROC?sOOMVhB$q@u%p2}n>lP?PJV(^>23o}wE3jkeYpdP44`{1`+Y`IFgi1=_#4as#;UHyaMj=7(*}3qr<= zBA3)iv-$7I`t*!e(c(I<#~gSYAX;XeM+LKtX1}ZS)iG1QB1LpySRs}%jKvUs|7pf4 z#2lyWN2`=c;$y`7K7gX-L_2A_MOHQ$wt{wt_f}m%${(bTUZH>m1H4~Jh;;hw{`SdE z&JS|@bNZ!_04J!0pK>W_gO9J-Z1~Q?SC2O}HQFBft-tUWjQE?ARS05oBi^tckkbCi zp3f2Wn{7-H&oo`aMMgamrNSsXE4sr2sZ;YGd=TO%6%Yzj!ohzMVWd4OmIPwlOvXR7Jzhs~C#T3qf7 z``oR0Ur=&Gn!>i}^8&t(*#DOYByavp_^>F@?HVx?XH~M*PjqDjKNt}F`bf85x3V;Q9wl(dc z0FQYmx>Fjlb~2h52oO|5jyYGoViEe4S;|!bDfSAtppr3i4K`h5CYwtz)&Pz?b6e7` 
z$~oy{%xqCmJ?{xf|3jD1gyF^mYZ5_zWHUd&NhuUDW6zdF-v(%yg>V>SHlq_;;qA9rPQ;zM-Zl1sUUPoAd_*-Jx~G-`2PyM@ z?i>d*+IL)kB&-@|iD$4BgCZtSCMgQ34a<&uQPf}KUA!o|2oTMVhFDOK%;QoM?~~!c z`1UAvpeF*;EPJ2v_jmS-uBE?Wh~>7))boV1bkNq`Suoa(m>>M^(~&$aQ<-M08H=aj*7l1;~ME$4$cT!GDc=vU3P)udc1K(c8nnqszhKP<2DWT8b#% z$$_Q9BbV)GCqvOXRC;grzW>nKSI=y#?LP0k!GJ9{I`tu2tObKGu~K$129v+lUG(qxA!DCXP0l)DRu@T}74C|KTX!=@*Fp_P^^kq~ z9CRU_^WJET?2^J7BY<%B@r?l2??rzHpJIBGc9SgAPj^U%qiB1SXztYa63TH7D&aW4 z$}^vWib-|z9;;gTJf(4Vjn;4!%rd9X9cU>XJ)@7Eynd-9yC*#+zQbV~I}B?dMWPAX z<}Y&0i)(bmP3o{>nfa3-HT`-}D$w-49K9mHvz8>&eY8MSIZu^UcCGH=J9##+JkVXe z%Yxu?Azp6pP+i_c1U}{qGL4;C@M_L1g3fQR{??1UZAf%t9V|kqE1z`c>>kP@x>)+M ze;Jkc3zm#ZK)XAl+1r~3(q@4@Ek|K6vc(R~ukj7jFW{M5uOm4Wysr^+GdBh#OKdzt~TKX?%jWp<8fsswdMStf${spj}*J02JDKQ1a{SyW^YI&CA7u2K*at z`KuclUHfhz(zf7xem*V%C^Nyw{0QWYt8^R`ewjR(?t^UW$N>-GdtSZTZ2SSZW;mF| z1}-m^4uyi`-RLIwTtTm+Wak3)2m<%LDQKp1hXO>kcm!&>WFc(yld#nIj_@9gTSjerWue;=jpiEC$K zV$RnTIZ9Z=kAOa_tN)09hP5A))3pXGKe3H}k#KdYe>W(@on%JKd6$!e-%I_;Qx+TJ zYDFId)bN$!4?~oJ<{&i-PgIdgzSjq7(&JvWztapwUW)Y=A175q>$oVL4t2y?(FLP^ z{f3g~i|GU#5kGS?UTG5M3ks3@Py8F$k58NS-lc_0SQ0=+3Z|&%#n5Or8rVy2cG>-F z1PA*3qYsfoMHP)H7T27Tim?mEF}-Ky2|X3!!w0k%?|L8OH0rZf8@7f$TtN$!vv_^B zWCYLUAfY^ACuAIT^anU23-DjdTE))ZsfoDTs?mjk{Ti;*?x=oty7x);u4M! z=H%}r#9;<}@>9T&oX$%PX&J`$dvZ({b#tcfp1thErd zQSwcmfipx#UH#6u3oL*ZE%%jgcVa10L$BM0w6;jrAZNUO};RgO71=P8Rtm7!FKK&>TnsUhWS6cw&rgRxao5 zhiD<#88Us6+q>vBX|A+F$WgFek-l=I%tCr@P)Tv^@9_K2ZM+>YSz~$Yu+J&lP=tul zV2H|pQWK9NJK`6(K|R2RKrvxVV)r8Ad!GLLfec!;90Met*26J#29>d>=W~ygGqQ*< zOsVh9c0~0*F=S@KM58m-(XpVn{WoPfID)t6*<=qmx0nw!yf?n2qix*@zH7lThzM^X zzQDJ*puCRQ+wM2UuIH=S{o5t>>0*Baj|>6~g*DG!bq`7r{7%8HcnZolvK-NS)V82! 
zvd5qc^tgxFSrq>VX zQ2Eu#!BT#;ZJp}&fk;ZV3m|2o-TK(+#IGcnB-d;9d1~VTiTTHaepMi@Z=x;L6|%4k zKTGY3@_EaNOY#bfprC4^U(hJ18V99iqo8-fu}rFPGJ%d_EW(XnoB|{9BtJBLz5Ii7 zlIsda)&nNzE6XXp?nHs)p@?|quzuOlryH-rx{`+UlD-;31Nd_%l5ufXM9 z{0+sT5|7JFe+aB89XVKOy+=m4%7$RkWlUp|DCIr}cAW1mt^QE(>xPLqrKy;@xgqOx z{>UquW4e;X`rA#-s8t~seI*$iES77~L$0dVFO9ZmLQ^vqM-Wl|RIYdlOUFm_BRln{ ze~#KlO&3KD=M&`XRpr~ya+c5#{aVhi!Y?RsQ0Ir%`W<%PUs7`#SjMs z6eP{YV1=VW839zy!E=6z9@EG!c)SwXDPI^Wzf`vQ3lnLucXdz#-wn>FBt&aaQk~2L zAYqi0^-e|ssd8TCoPl{$N2_;orOoYrU+>8-b~Blr%Je_|pr!7xz_buoW`^7Q!-A<5oE+w% z)1!m*t~WTO>h@S;59dxDSkZPrEN+Uhr)P?+st3H?)GWK&l$FRW+_~^ zlqc4*f{)LYNkT{%z`E*H1^ISdcXZG|!InjUq(xlmCvYgjd%wn8SFO*?944WI-ah}X zP!DvN`Qg*2f8e#1Z{KrpP@C%EW0$$+AZP`Z%)s*M4@`QDOfU>KHw`VdgRD)E7%!EU zVO#~aS`9J+`i|#K2vv2)&F5woAS?DIj_#t2tVqM67NJMdE(y(Llfw-bLDW>4B>Ap7$_t^rUjYx&`;i{9842>0O6ZYm9UK>vCADG1EA2>c^~m&i>KIcXHigZh13*Cg44HKtAMDP5pVP^ zVx#9`w#DU?quN+7BpR6zn>+{ys#-Zz_wvPcZ(opX{Q^J1*@#L~g>r=(*B}i8EO7|F z`Ti2!<_mE3W}AA6gbj#rvRArEKFa;2xrXRxFAHMP^NzthBvPEg4m#P3O=Jpp>@@VW zAIe<4v4gfhv5~KYxIg{;82-HYGlRdy7HQrfJXwff3s!|%tTQL$4d%hiA_V-$EM052 zoOW-w3QgzP5m7pM;~n$}CN6C63UOUC?%ScLF>{+-;HVp_NS}eSj*&f6(B1hXgM~Vy zB^>hxHzxB2&nNSG_a{wy?!DM844=|Cibw5r7w^d7B6V*4N$ruJ0i;OXiR3h5ebd7x z8^^HkFL*D1~Ea>`ebx< zcQ-gY)rqLBYj_5gak=qny;4CAVp^fdkHJRHmTV_6DTpFCVim}JCEmU9us$i*D>@nF z?Z;b8AHuAsju@*olYuzEq=jTu8b`qsY5M0a3l^d^f=)4C{G5MJ#C}b_IBQ~Ww1Pno zOME>(o3Q)iGkK?A2hb__L7*-8lKZhS5HYaZhuGrv8n3$JvVY-Y878|hu;^ghG%cjg zavdL+C@rW~Qa5D%_eKybYgi>_hmn3a`iESfoIKh5pF8rKg3L?MFr*MUeuoy|C!o2E zjcgzlumQq$oWaXOIkG9H2CNgF6j1|Yc5P*$?ik1)FJO0+xEkZpRhvz%m5(7-_YfjT zf~L>4woNy@ZjaZ%aObhOi<8Pae|Y#+FaFp~f1@a{W>)FMr!)|-9MpZ_N+74~lxIb|Y8ndfBMHRya(GZfbfz znKipAMXeW)ul&NP^@d|Ds^C5Ax%PRQYNy>BSK>Ha*(@I&=H`2qk_iCHRi#Y7;Y4zl z865K z2YIKq@rPQU_b2T9Br#IW4cKWWApIQW`y~iG9I*X`+-8i*7e<7_3sM$cagENq`1u&$ zjmPQyzdnFZPtZTwYO7OID8c#0J|PS7!+|%LJMlZ?F31$U!@i*5KnxIRH;yJJ%PlwD zEmF@8)*h3!u7j1XAr?+s=KKIAJke*CJ)TQH4l1XJo(`u`b^0TL!@?*t&el8{^LhxQ zPuxOe7D;CxG3@8O^yaC>>eec;YZtB6r9Lu;NEpS3ETGnm0<;_vcTyjyX9}nD!wiiR 
zw!)Mw`^BVpPOju*yW?X=gdycY2LLEkdn=G~^{%=!RLd@Bhp9Y9qZ5=jKceD-OL zFy(xz3`8MLyuTRMzC7Wt*Ok^t550dXXmJY?>MLX~&^E$RnqC~cgKReC*c5dcXvvh0 zccT0LMZlAd`gkx1CmrP|Wxc~_1JiR?DgdjgN&I}dDl_33Vhgc@eVLu|bR)CxK`txY zy-@mo&h=Up0!5w{%yVL9GYx!ky$=y1kk1Uu{OCMV-Qk_%dDuR?u(;tq%I38fOYLng znWOuH$?`-@c5l&)pU)mmo5e7Ec+}f`zx?xlCv|uiX=FZ@qntBFug;TX-PtOwVJAVX zK^kCltIm_(-$g)D)p2lsN@VM*cl>j${hIW#2_Gi?AGV%|zzUauD?G;j`ZhT5@}*WS&WH3PtzeTyA%7KYMV@ zut`^BMx)Wx%YyFoleaG4hKK{vGn`k#E0J7D^H7Ffj;pJaj-kOKE+64tFAyJy-4Mk8 zgEv8Z`L{Y`m5X3$Whg08_U}(K`SnH0vZE&|`z!37Ga}HhwQcndXKE(EWXSWP10-=G z>>1^RG8bv7cewPQ5ZtbiT%pS-)lT(ER3mL5_U8)bQ{|KBQCwa1XUy`%ii52x!%u@yn1LJ?3PSSJM)gBDqd>P0t1j*2F3YUv zzJ@kMkMTEf{9}GNY#CBoSDrqjFSdw-m~eS0Lk@qL%h)ixG#9Mrdylw#C(T6Qo$g;M zA9;ElwR+FEV!bHTyM3^0prP5-l%}b*dI|_lQsV2@xhRc&TYvpO;HJw!RD+5@&j zY^0R9N=@LR5M`}WLk9eo|7z}nE!EUWwfVCNE;@)vgdFG!YOP%+n9M-x^eJH3DtI3J z2WmVIBbfron}JMn#E&rU6hf<+osWcmnesluDuN5Bz0GX>c6^7)zw4(9&SQ8R)4;SkKdA_p-tWY{bK5^L~rumM%H zBQ}W~x(ykIl(M6>bS(B9S5gf>vXcz)^RnGN&-^)~;-(c3XKVNG_cxwarBIi-OLL!t zF5z7P7pnY^`F#g>Cfu;N`6Bcr&H1aG<9SM?ME(5huyBHpP13)evx*DD$kI<#XT zdx&2Yo|thO^2^Ux4;41${O{<=;C2!a=lf4{ed>MT8n99^KBRk8b;x}6c4fc)u^vTi z&3PPuZ1L^gle0B7ms})MDxz02C1$+xITDGQrbW-cBjWVhE5Kt-Cv`3yl%JyGm4xoB z3v}}_rX|-f!L1n8iG94}lkWfKj)=D(L@HtegTxHku(H`EPL6=`PLpL5>K**^B z03UMeBvAT7W~UPYGbhLIGmZ|VlO~`BH7~EvdnGUZdC4Bj=2A1>xgOiuKJ%|JJjPXu zNO_Y82?S;8!v$Y8`|NmnENAkoe}9NQM6B9gF}8bWF#s#}XX z@4BVV%zZ}93_cI!e%(iXeCBFA5tX=v{A=M47QA&_jcK(9nD5tH>~|hx__3fDoJA9I zdaTioExW>H0rGl=(T7}*_H2b)T#0m4tEBcrtx$w&(b51_1F06Iu2=om*^AuhLTL&8 zX3M#2&G3YPnxQgsP@VUZ?y<2T$v1CynZ@aw7M}oer^plVT7{hJDeO`5uM>qZ(MjBA z(bZ6_nNmo5vcIbegpC`7ug91ajoux$Vix6k(r28#-i-z=eT{K+ueBf0t+W;w4#xiT z^{L|9ZSSv`oLZ+%H3vOGBv|ow;0G!>XOxukoiDnE-x~>TTW6etNmmH5h;bimH=Kn; z9v?@|5IaZr6MJ3bq;s7YBx;fSl6~=W7iNNz9Y|wV@S}$p%(t~j%9n=G1Dprrbh_ff zN?Q^me=Z=40yr5v4^O`fH+nqqPIJP;ax4r2$lRLarT^~rLKeA0I>g-m#EWlETrjM1 zqR9x!6_~$$f&i3dZbB7GSA_tn{)4~F`5vk{8J0xRTC*alEIh^fC(dG5lDm2a>Kt7h z?(^#GH~e)M?pRq4Zb*J&zOoWvzTwISKad~=UVMo3eBeRyc%-G94xA*uB1ZCl#`g=l 
zB=ZZrr51eNr45)q(C~N_jJi=*)%`LhhR4kd64QQ2hC!o`0=QHG=@M*%`V9p=OF0SAyjyysXK6AaIKKiot1>zKi!d1lK}~x&TcC#?J4+EE+3v@4Lx5F)+@8pRZb>~ z{F_RvC%HNH4))8SMfH;WDDe$2LPnVagPokQ?@RL@@t)dTrR;=?1+7yNKZP)RDH}m+ z95J=KuidAnE2>g10!NNd*7;=d@XIxj7-0P;1OjbL=27e^&183?L#}%}ST_OZxrr8q zfoUqgQc?uU_1lp{+LKs#$0_qWCx_Q`S7-UV!5b(c{PJN~R z??g%r_|gDxk<0EX${973=oWzrQ{L2Gn17%N=MLM{d?0+lChqNd&*#q+FQ?M zg86^@aT>DjRi;;2=K9&1k+riB2r9_EqR=T43E`N&eIHxTAR!S*bH&Dkxq0uSM~1V2 zVdZ~0=6(`D0|8Pe=Lk+ngFEm<3w*x{KDGpnQ`4$2F3#0kGyio&xbB1*?uMMgR(?mp z9H6rlN>Pv-B7$~uS)Wzw`v5Y`nRlQ%0$$vT$lWR13e)BT=^a@;Pfkrm;tpf1RSLpJ zK|~KfL^3wgFKz zL>{N`YQblky%trWGDhK-*+P(Lk*K-SD+a~(_`_p=$QRV&SK$ze9E5fzti5HHbWNGk zF_84?yoh{99p%Ufcbyl7B~}$i*urAgMQLx8905Lwd-A4WJ;I)Z(WW2NlUw*kqkzHb zbrbho^a-!=tSfY;R|KcQj`P-0H>$P5+w^#OcE5QGrj5nKX^lm$!UQ)OEz}*1k!QE! zMY_{BZ}*4N@_Ty-Oi&tzySGC(2QM(AyF7?EbyTSiXVj%5$odD!L9Q;l(q#X*NK0f6iuJ>&+Y`lQp$PZ=||n*UgKZkE)A8 zr!-`l=bS1^5AcN6-a}0?FRtFZ*wR&`p^KP>-)n6#zBb28?N!G>mV_{Uk@u+HHpq5P z@0)gSf;`w1vBt%x|G{n$0knwr!2;6-pB+ci_~R<@a1^!w3dfEkC9s2) z=6h2$-zD#@+(80o>7oc=w_Uc-0V?v30!;T|2y(}!F{ z)R#hJr6ZjE^%`FE=p0p0>?gS8sPtg}@22G{EewulY~F`AO0SZ;EEI0P8Kc|2M9`;UhJg| zp<+d?=JR%tMe4&~&tno(UK`Vf%u+0F1`PwaZY;0%j0x9X_Cqes;?M|WB4f3pVJUZ- zCRmU6$dr%Crs4xkA*_Mby@HnfI}^z@^&RB-mT;Xu&*aP-Y*`w*Vih`Cjr$N`1VC7; zNM0^p0{ZY?S7sW7azck~!YA$VYgeHg!*F1;OfsK%>g(u9tFQqosm{d1shu7R4ODqY z%`|W+6EJPrTR+u<^$-iQ4@knL7G|7I zHYiiBP>>r^%vaYAb(aHSuLD5)z>LS9jZAP(ZFx&M|CiDjz2eHE$!zBqem;dBvmjtq z8+Vj2soQeRhT2?&tw*+$x*WRc*ob-1Q#n4t=N$K*91jX8 z^KZc|ABS2t1oMAi4O%NXez!e%bZ~q|qRp5t9f<)}vye_Ke}z4dv9+L+<0WdC{7F6Rqguup3ooDSqO|RS#+W zitnMVgMPmIaKJa1=WBS4NRrRfu12*$)2YcSshY+V2anf)meE`PzcJcL=c@d4DV26% z3S9>;A|(Q%VPYboY$u-%W7q7$4;S8ak>Ko#mM$yO@PX`Rexk@sIL?aLaf}$!(n_3j zUL>$~v*g~p3J_^mO!Tlhjq%kUkLASKXiqv==_<2o!3yVS!V84N+n^sE#g9gxCd8uO za*R~h#iBXzNJ7-eH8=k=zq%TaS4GU`Z$W*mD6^%D|7z_rYpJ@XRG~)isfVO}op?20 z{$IcxX#X05G4!`$xbqfWh%q2(Zo9@e-^<3OPC5&_!sU_{~M%8ok;(i?y zK`Jgy=SIxFYD((;i1-n{!l2xRAft|^i0e#s)r*Q(v=$+$Cpo$DzWED!R>6CKg}`y7 zA_R>liNF^P^)8{{(>4X3Rg8S17%2Q+qcpjsr32)d*{0|rwj^ 
z^xr9blTYc%e0QdGRJZQ`M(DPi5_vghMZpw6*uHOEw+=?Jdb3DoXI=B~J43R@y}RiC zu7~F&;S}rPE6}@*ncgP;b_-N5c4DARp(t3#qN`^s#)!k)k~FE|u7dM~Ggz9Gh5SW; zSLrxTd3)LHq!z2r^LS`c>_Hzg!{`t9Z z3YlKWkHQ_fy@KP#fE3Cs(B<|OgfRd5+%SbKQ$dt`0J)^_dWt25JTvse#Fq|#me|`k zx#@qy467m%@3tI}`m>PHC)c1$n3$J&Qvy)Xnx`*V5x&=T@y-18 zkh_enhTRrAq>C;(|a4hjp3O)a%kndZecZ~o(JI3OYhXU zM9Y|2hHt)u7R43yI$!x#m|tbT@Sq##v9 zg>!023AV1=@1FRph7d*yF;Rot-=p308PPT*OpdBMIX+lE{B%-pw^?VmQ6jMfMJbPY z7-vVvjB^sTruW_-2z1Hw{mwV@ZV;h$?H8W!rxwh}W|OF~4P?cW=6@zMYlO|C&Z)i* zB)XXOR2;V%{W~G+9G?%$_xYjzE`7=;Tt&S0%%*%Jo5e>4*IL|WrFeE001fI@XjrPd zK7TyyXqs74c;PzI;y6s5V%+_%h4)L_7%f>H_kD{;99Z6dz7nIl;DrP(QNW?`SGjyC zC>186U@Fhgcl0$ARJ_!HiQ*+3%T4Qu3g-XD@V1g8fdQG8u+T$#(N>#K=wK3}l1!Xa zPJ+}H8RJfLltLZs>KjB-KI&djU;$4A{gh9|3>ffLV6d2XR7YY0Eqy*%F8fuf6dUhj}#g$}qkg%=I_!3hE6@$d6f(b?JxKs(`SWwuG6O^RHq*Ti9NH z0CHW<#Mz`+4(4+S0b;{G5sfx%T2rxzbHeb{$bpf|eE0iYs zmJzjD_&r}0aH}rKY|v(@CKKcom~Mmw$cPoNl2nqT8;c+WrerkPQiCoK+sJKrB6=B= zNhLE_B+}TAEFTjr?zwHs`8F9@w5RarYgvD?s%T8`Q*$u5`|YWFdz^2*e}B5r^>~qU zXLomc-8pDU>c1+vxA@yh_&DDVv%3X8Nh=Sku6%-_(_Pc)3x>GG0EABMNK~zhsrZGu zbDI=?0u$XR!A=Axlb1J~`#*ok4Y}>`&eJ=)lAu;^55E7e$)0XWEU{_gNpJIrAb%05 zbNGGLKNRxTVTXwE&5+TP$RQ~11L%mgg^=V|v}Z#~k!Y z+!Z0P*~Zd&%O>l1RmGk}TBQUsV(tb9S`y&)BBU_Z zY1$7TSv=zCwcv}DPbZO~EoZ(gD6I&YwT11r7uS;^KE(5MnR#KjA;Hs55igtX*}Bdj zw2@M&B4NbeWhi9|RjCM(sgbo+L31(^jkQn1DxeZ0_Nq`vX*=vxNm5j+_z5=u3bxi1 zXC0NZQvbj-IwKP+ou-F?Mc(i?Mcm*Ayr+yH9xdv3D=~4WOV6U>b;&XoayKgiJf2~W z1GLTB`GP-OsEjy6*xvSHid?5sTEh>CSlf2{BsaBQm7eg7xhnPHDch$tp^*AG zA@i4s(c^jtvCR2=CYin`rD^w&&$8;p5voNF< z+2C^XsBVZV6k_qDvo%#c01KShscZ5E$G3&B>+NCe8gW$dwxGrEJ)16X>wgyPh3w*X z_cPtrnV25!0Qd!)*(C`8J*hR1o3DqP(d&W~dj2Ja=TiYGIfk+2R~72yZ3OjGo~2#l zPf%@fSvIk5wE1uQ_c1kUz5#Gd36nH=piA&)^^RaB#amS;*JUvd%mIJ;UzU?TDH?jM z_rj^r~dK0JpEnb&9%wQ7!*woqg#QWl1CX}`L{`u8HSKVSMpA5B z7x+i57mUiO79I^v{yL(cEZEM=?aTKL3Y#JfLV*#zc!TWk-HHC^or1Pq%u4bWR&Gwx zq7QezJ$2_=vgO>n1-oSUS)8^cns&Q?G6$R_mLgsxx_^%Ujv)bi!mJ?~^-K|%vZR1- zFhYHM9Ed-W!b_McJxQ33ZE#a%Uwv?AGlombB7yCYV8_g*NR(no%{tJ<{xr*(mxiQ~ 
z44EJ4(VYZl{Lv2h^I6(Uce7E?l=ooswyobl+Y2te3Z8m}+!`)N@8BtZT4V+)+g82u zD{``03fALrih_fy!4$>9+<5+zhC=)cIAn!)*NYI?ISJQLtsh>wC;R~;4#6p_Ma-_T z_`&oEW0I7l<2rz>^e%lG@4QDP8NRbV?s3elq%7`qL1Cf(VE5W#U*IvvCEc#1XUhIT&jF53p*t=k3&uXBskw0r$Z92T* zCmkGj?Pm_{VZ;>Z)U01>TyB(NcoFZcWnca_$9yro@X_2nb+fCZ72L>`Mnq=IPyDe| z8HQnH@gQv$GziE>oOSq%v|bRJ;?)ne%q#{`!mq6m4Hi=l@RqZgr*w84UsGSLdqfi4B+Y6S5#L*dbIX`qZ5$(%>x@((4O*w6V)ioj}a@;`?GKKv$i zZLsMULOI%deet?PusXWDA;8(O45H3rsQq5fQv&;g&Q70hCZfn%#m{9eyrP+KCb;4Q zPVU}lalb8uB3hWMJG&nvt}rQ;fZ6cfHo-;?TMCT9rlm`t}Wek;e!O~!4{l;N37RI8ULC#qi zTZ`|-p2Gks!-SF6b9_7n84G^~^m$8IcFt$B2{=(AeaF)WB#+4)lXDhEWyqHwS1b4D z!AFffj-Pve>r8cN&ADr2#3u-l)2UC6ZWhakZlP&v;Yn7M0auP9|2fF2`|?;SVpcj~ zG2t;;rv8*LG_IJU*VT;u`SzGzy{IDXfd*X zkr^W26}9rw-^BqM5T30Y)$&C6y&r7{G8&DJ1qKRzbA9{gyBh2Q$5&0!+okMY>!PSy zsV@pZzOCt-ezC8!+tkH4d*j@jB#s6?{oGs(-Q{n*VkTQ@z*u0T=AA-1*?0WiVvlfxO~zhnN7$a!XzRhpV>?iZf8!ZE*-P_~7mYcXto&t^tBGxVt-n zAi>=|I0PBo-3cCCgX^9B?Nj#%KdAXLKv6Z-Z+AbvmONy+Ldc&LNgld5@VO&7FgkA0 zKdIZ1?FX9J*chpt;o7Isst!s^wDY>7xFcx?jOK0DD7_6}bd?XKw~{C|(`9I9liG=v0%B@za)wG*|v_QXv7bXnufLOyy2o z8J4C@(*{Db2IpL3qPS<%70cg_SASN%tfm%JX^Zq;mol^UAtrpin6fM_~HGaZQ+rKqNdp5) z9I!~MyqI0O-X~V0riNfz+yFt66mcyBI7=mpGg|nw2twVvYA&9_zC)ZCt0r;iCT&1X zA&!g^cXc%>&+NZ^`3PmIqQ2h%nHtFYRehu~CdAep)3zAqCdTh5m4oor4xXN8W?wR_ zu9gXh(NxQUf1z1x*VR(3RJ8^8*IG>6GJ8*=t`c|5jAS!*wK!CK+&PPa+W~?AnIAb8 zIDo;gs$hl>wP;3dyfMTy?n?&NB76+;#dykN1deq$45fU=(?+#l85Rp zj}XTtUO$GsQ_c_(`mRXKEnPp3vQx<%kzT-76}|XFT({c%KioSw=|mz zTYzY`GTnp~r<)H6mjKBx0a!N#>{}Vm2_Vs#GF~<*x}h2VhVLT@!bAR^B( zeq;3I4{h&iZ>4!}e?d8H;&$03g%WcTes)Y!ck~+(JbPVet55G`#MU8>sx~o|wdxsL z<+gjq4n^zC?|GRQV>)|f&N!g3qBa$=vY>y9WYKbdWe4hyA2e(kZqCnLNvBG3pHqtD zDk7JTr2}dZMoTyrGVEvCPt`2lsA?o)Uu@vp2=HRts=wkuEb!&zMBWu3$c8?$a6xWC z^I+bUGg`!U;RiiaoUElDqg#ym$k<$R-Zj)BjXc{iV|l`li&=E#h9kBGu{5%9Mwg-v zoS0oVDHHy*~> z(Zmn%hgBrp_1!P^QZy(wGSPH1xGAIgp>#xP!-C+D0nj(&p3d$$jaDs69HAuWx!rs5 zBPzBlsxpQt^+z!!|6k;b-1UU4;icWVz5~8`510h|+-SS97IKa=^4;SX6OlyS@BJgI^?@cHUpwa;`=%CZ5|+2i79*Om3E^x!bUs@;k7q{ 
z2F&iHPREoZil4QtJX{<9?`knF7=QLsaw2yFc70iSu+QP2v-u@Q81ZIQy~a9*j~_~Z zf&;ig38mis-INkyU>`)0a^#GBq<^i*DIW zZYULD zE`{TB>)tS|1Ma(>p!ZT&_aJh<)elFM-|UMpu{LZ)>?fyPV4);-94onrQ)?Jt)4cbV zI#6SC7`mx^3ns$*(&-Ikst8Ml7Da3o3QZrnG%Q@sQM!jF0v(r?^;7Db?X?vy*o^S; zu`Q^{ZQ{^9+HV*&?uj$<;_7*WO%IigaPi8QZ=o@A;|pw0Bm%8V9inl%5%vB-Euq#+4yfce!%ORZTR%O)<4#;fdbw91$%ZZ z!?qw`o#L>~O2BVDOYMR`mm%xo!`;9R)>i!eH_|NCyAw^^!hjSc=}8Kor`6_+jNX~B z5QL7PSV=>J3bfCgJ+Op;&FbnfJq?IMTk70ryJfEVh%YkpzGITjhEdHRGrptJFuApV z3fa6#e_!RVGYx87eDxX)uibus4Z@VH;TLQ`ikE?6A3r^p`wV@hno%w~vxDCGczs(X zSJs8Nvcp1OwN?mN+LgsTyc(|Zr%Q5=gpmP7P(v3J*=j`t{e)6Uqe60>u9$Bqf{!i4 zd-_c1VyQ%+>E8E`ACaRf$*wAvyvS~8Hq8~g$-KQf2zOwVa=}ep``w zFY#>nJu3{WQX^@wo)2HULyKX;J93o(Xhv@8oZ&?7qSM~aALqMgmbt@m+K`#pNz+(8+5u{7puDTzj~scW~U&~*$1I1arQJ1_y8qe z-$MFp0_uySmFaz7B!1%#p^qAwZ(SGl4Gya)N$fwha5+{DNehpD6P15*xt;%qND@L` z`Dh&4`sBsjj*HXeC0@52+x)pk8O12JG6SsY`o}yA3?DfT+qn)KIgHraLz??c`$+h6 z$H|X+X$d0Ls2S;w59*{iBCXsjyVQ=KM7uSLSnh)LTENd|HQahELJVC zL~_ar;_}LbV=mv-j4}|UrGd7LocU_$^PF?}GP$w8Qtjka+G9={sYj6a)N%;`TJ8YU zpXz+XgtH%|2EtD_hB^h;wCe?hARi?jK?m(%xHSyK&WdM+oEGFd}+kE zHTD2n@MaIDaXx@Pa}Yc@nhah;P9nd)7)t%&kIv4Pqoy?igR^8YKp?V<+|2~n7vRpy zQYK%sD=AB!Zsdr8Y%&&Z#KP4VFK07E33Qg?7sD?HbQl2#NC2nIgl>bfUHz4S0;b=d z=lD}(>p@9-Seumc?9c&=&A?#JvLuY^Rxm&IaQ0^RnYB_p&3skjDUuGJ|~; z;w4uFHuQf)F{L44GSfqPNJu%ni-am!Y3b9PluZMJS41y*tmnJ@b^hnB+B~ERCXfB_pfZ5U|IZ1kKhZHQ1%uA`*;VH znS04ofk z%ypMd^4DcW#XCjcp*GiFi8*C{CzfceTb$&3vezCuv30yS&9uIq9&OyAxaAT~m`(Jy z?bSzrdl|L+_VSKC`>rB7m>%d&O0pFQ^bfd`%np3^b^ZmJ33Xj@eYcJeH!5>{(xLnI zp9}ntitFu%wx-55T>N=_ny$M^IuC8Ue^`Wq{y1zI7M^fE7dlrEHw&+gd zCLFJd?fP!24YT}G5sy$8G)^cmHD;VQE};Z+{2D-;XmG{P!RC0|8FKE$%iWTa z6)rO0irdvc=mK0?D&WCqufH-$h_8jKs(m0q^EpV3sz^Tk`*Tszi|2PvrsclEs_-Ar zcAS-_a|>cBcMGV6N`wWsH7P$N)C~RuYH4f~g1FdWmQxQ48o6Q9loz|EX}a(YHENZye$rV$&CcxpNdC%+968j#+H&+H;o8s1aO&Z1s`BnCL? 
zmE*CzBV*_ExQTCXAVuncrr_sVDm(F%ij0yta_Zo*YPyw}P8!1vL@488RxuW;l!hqS zu9Ws=-H>np2_mNG)iZ$7aZtm+pUzzrQ)TExDbuKU{x!R+T+@pQgDYEc19E(pobW46*;g?xrvS+9_s=>-mT;5cGc9>bUQtm zJ3P)~taH8p8f<5PwR&DIIx~d4Ih1n*!oPq2{&DL6UwP))rNL)5W1pK3VK$%!%QNP; zZ(&m>5-!nLvX=*@I}J-hgXuFCPc*3dp17{)vfD>F_S8Vee70YI8PbNZ{@a`Vx4nAN z;b{<2-a?ps;@!IPk0xEAwSL@@*LJ@TB8;w4$>-2kano z^=WLOI>8>&0AS2L(0yUgQxNqRhF^`OqAK)F8-n`xQKM&%hN(Q(0yQtOA=j`2)k6pM zW7#K3|8SRIa>TEbmsvCiX^RLEnt;e?{)e+RruBLSO$KTu+6C3h@>_duBPpZF8rpH+ z8!5|i+}YO8%h~mkwJXG=9C6hdx#<_zF$}b0kIeZi6_rPF?#f%9R4W@OisTZ?A2p?9 zWefNONls$S>0>xl4YJ|lg|K5PA<=xg{9>U`@X~5>F~W&aFNjI$?$~27IQVuqQVLR* z4uFN9=8t|n9Zqy7o3!|+$ZDzHDBg2{_Akra(@faBflXtcY#5~hY zMB05VaJ8=zOqYukQSc7#yhe|OYyoB@7V$!^O85hAbKsrvT%>V3yqf@S#{cRrnSloA#SMdU??6i!$#XB$=t0MlUdq=vKA z0}MntkE5epE&Uh|i`3&v3=zZ))o>a(ACIUTiZlax`|rumHni;xIFYN;0sX74>zG9T zD7@f=-Tv#zHc|-(W5IY|9Tke(6|3s(RiYGxp2$^5?%+5sAVatVTjuUnxD@pX$qo;7 z=>4v#-gOY}Y>NvPf*X*=Ib;=SC$Z+8244>rfqeSNH+0#t*bQhY54R09a}5B42kxpS z6g|JNV*Z&F+)wx|k$`9-E zn&yL!aBW4Cyja?y!~>;WQHXz2%$0mOK7FC`c3>T-J&TE=$-!tk`$AoJ5+275o77Ut z`aR7<_O>73#rD7n9aX3FMpZlc%2_-0$XPr6h~hZ;9QJClk+Y7l6DvUBBF`&ixB&kE z^yt59KL^=v z9ZDw?4gb%XPA!>mVsOtBrbO=B@Bf4sw(M$P$6FkO0&`!-vGd#X4lC3sg z6LMN`!`5NMx@Adl^0bo-tb-9C7OFRUc;)h87KHd?b-O;pbgoM3w;Agpg~#d+NU*#% zeeG5w%ec)3%F^_#>^>Z3^)zDK2D?53x?!_8GTQZ)kehO#dE6{KxUZ}cnYId$Ehw>3 z)`mXC--o>CQvhX!bhej`=~EI^k6_uZ3gkShd04?PrE!KFYp>j~E&LF#qb~|*AK*k1 z86IXvip#1HQNe#TCNgB9s@86X>Q>G&-lwrLop8$LcOZmbj@Rdhpkp#L7iS<%b%MI%qHLjDQT?EIf2#*baKlqvVUNB0@ZndQPOsbPOMDjtnBnwF15 zG*BXzRvPMleUgy2x1edLh63z#<)Qex^Vn-3!4K$^(w zK&xOeC>t9a%E;?{@zAR^ySg_tzJR+$9d@@P)wA_hbOHi`-AR@IHShbd{(!7V#Z@=p zQZ_^T31Mrg)6DPMz2g3Op!jQtM+czk`ocp1G~mCnqiyMG;gGu@Wfie8jsNSA=VN!N zL;26l48)-XTeZlDGicjPNEk2EYyNJ}kvYIVWpQQ>Qu~5ro4%~F#q0+I6cqY(X%{rV zh2)w4>EhX{jgT~1vaKn%#L`7G zU8`4>E;6>(PDfip0}D~f8`Z55HPuR3Ok#ndcp#FTnFVKu_nw^MnQQ;YsPoUS&IYyx zR?XerbPU$DQ60fR!8v=(liA7&=zhCA^SneummTiga zKe9uXOfV0uPJP|=N^)_0fOsL4n|?X!jX#k`1~182Aiyl$GL`GNea6z1B)GiqTfl>7 
zuZGGeY4+S_0@Cg`o}bUR5w3T(nqgr(H&3U!jc?RLj(mylc1MB-H>$U^WGxq1R*vX> zLz32@?OqS!)h47_wE}t;x&FH*gL;cWHsVzg*kx9G>4}__sCIXp2k<@l{uOfeiU9WE z155vpy;G3c2ZTk_3}|RgOeT|^SOg>W)DtbMI1CJ?Kx7h+gv2X6L|?2tz+D6;yH^eF zZLYjH6xJRTw-bPkb{`mCRuY+`igUgW3wEH=S;(iikD$6zH}G?vjQRUxZ3fSy22;)k znn7Az?}Zk2gO!KLnXw>dtHeFk{Sd*1a3g@1XIf$)jX-Xl8>NIUt$@}IqJq;F#|64s z9;q+nWJdnn<5YDu2R;!mBrlWZp{>pmlnZddA0W~e-u6OvN+gl}n--9Pic`uUleu5+;?pa9NP1b3$g0`DI_S*P{i6|4dV23x$#zaAdbTfK z?;%?y*iYlnl#cQ$Vbhmkry3t|GoYT~%w(tSg>n;$uQy~Mo3%WU!?Ucosn84OM%EVe z1JG?Cel>6RCN9w#v?j~P|1Nw+_$aXCX`%ZH!*lye({lF$yEMa@wejX2cl1n%;2r=i z+4(yJ^cD~~3$aq=EBA=Cs^m(x@=RNdC5i4-eKIk<3G!sGT+hN^*ui_enNOdo-J?4a zIv@%ML8gB**Z-DhkBjmOPRmq^JR$hYw-h|KqZ+ckehAg@(cDhX79vf(|5(vS6uXxJ z9B)7XKoKpd(FRbLR1w|ck(1MS(Ey#*DKr9wih`iV-&186M9LiWnmH1Zxo0jZVrfAAm6=8&AR z+0+-NgctuSAN=>{S+NVys!b4YzZEZ7`o*{v8x5K;=~xKBFge^!irB?9(s+{*+z57r z1hPZ()~@;No!2MW2sPP$aytt!)t{tFyp4&aLe2>O>`_JRR7aJHAPVvDoeMxBv(P4+ z+QMfM`f8Bucr?PZPpdS2I6q$acx9-(7JRHSEiK9}|9d=q@O2UGPbRMAc~`dnuWEB| zHd065egi|TTj!-5BfzCa?>`IFG<(Y{!ZQtZbZAdF#5`s>&7dxeURtx<*-rM^8VT2m zLyVcR&1##Q57_9x_0%HEa5op!LhO7Zwv4fG6N@wK+_0v41Q3pus&=|WSvh7wF}H$1 zWNnGYI0Qh0EUiRvR|Eit0-GCMaQ>FgN-0t-48Bgd>z|O_sc8|RO4oc(7n47C`Mnp@ zj|w3t!~F+qhMA>8S zQTFUEZKH}?Hos4cy*9^$KSqFh*l#$f+NRp3G}RszJA$%P{W&59thfw>Uj(CuA29c} z36u>s?UYa99w=)QjJxa{F2g2Hqa^12!$p*uGFOh?yF%E$y$PT)e5Y-LorD3bpF*!& zT$DEYK*xGdW{HEkp?Yh)qyEE7jS98hGu6WYru-vh%K=+fv)uEcUUhg0r&C!7m~o$) zEmRfERZ{3ws_IpVHLHgb)#HS^)E_kY zmGnPrec?cOJX8S>SYH)MLOP}#w%@>l-vV~{g`Q{8rIAi#d!A{2c6x?;u5!>T=gDr| zITLr_hx7Y7?R;2VvO?_(v@kiiJ+FOtXn=2-n4q_Ud?CbU{^v_?kkKllnn~H}FVpv> zSQ0&&(N?GVmrJ+lxaqWX^x@!C#=@y|yGwC9mVW|+kleTw#3Re@Fb;nvf{IU!=}+#(zQo(2H0j}rFZ8vFx&=WM^A)5iA1+v0@1k-3TLdH8GS&HEf==kdkx z=PT!w((9+;VZX>6Ska*f(!2TwUs(I|w@)7TPsqLq?-AT@fdj?0WM*E64{XI>VB?PN z2x!rs;IR?k!jijx5A`OH6*;{`KsI7@6NIyHRK9+}!RMk`Z3L|4Z9cL7RjDZV7Tz;w z?Hb&tq!2S66X$^3CJCV-aAzxj_M}XocQteHDo<+|A1VcSkkTQJXvgMC+Jy8CI?@RN zh?w>REJ+H%wn6j7q1tJ%MKb+~RK1gz$-fC?xT9kkHn|sNwOy8LNArW0ZGvOni>->X 
zm$6F;arQY_?pD+$E~d?sToLQR)i=Sn>3MJ!9DdVjrtIw*%+;<-w#a6!$7ZT{APBC# zX(}s6r!J+T;%C`+T2sXfQ`kl0w6EZaCA}lP6(v7s@!m~aO-Txh@+^3obra(x=pRz3 z-GxX%61&I~%u`J^8ytwhO5$faw2c`Mr%}jwKBF@NrO{MX!EEmNp31sKUv2w~Nlq35 zZdPkn{F(Pr5|x6izJd@@HpA1QBrLHDdkXyl?;?BY?R;v-Puts=?^OKMQ=@WXR_bt# z^XAD)84h{!}j_#*8*(rA|cwc!cXICMq1qR3Lu5Fp=yj-XSVLz~{fou{t;GN<+; z#$m_Fin+AGxAHHgJ!NI|-V<2rEXyu#9x~bzLapn9Z8{|%maJ}s&7@4LOFWh{xC-i% zi@iQP{^hzV{FOSAf+6CBGZI7x@^gdY(4b5jOWUJ_;gd>;zmolWG@pK9uArh66N~)y z<%OIx*g_yT2;|3AQg<(_nIT#ThS4?HseeU>+8fIcTvo5Z(Wx^Y%;tBU`DLTg_4GF@ z5N3F>QcE(6$D!WsSbd`>Fs}?5O9F`^3}ZLYqzh(f`h7i^{SM_~R~^TO2}zN0;I|+s z!~cpQ!l7gG6bE8@frs#-hrXwXcPWD;07NhNB(L~{MaYo%|IZKkpKpQZ`49W4J5#pJ zF7%2)8GnS6ksvf~a|NY0ETHd(v#&fUE$hcyzm_3FS!=3bEXh7o00|Hewscu){LkDW zJ)O>VtWJKLIx+qS(YS~;J3&OuldoyW5Dlr|h6j})53Hdwiah{D(MmXuRv1SB?JFVk zS6dQf99x`v8`}cnYj2mY3JXzR@TkaXqu)e+1V8%m59kxAFY*YDsqgxjQ&6I~MczIc~zDm}S)4l@LSuVww zDhV(izMlhG7AM(F&b;ugy7ws?0EmPqR-{Ve%2^Ub{54rbQA(NTBxx3j5FWr zX(b2v3`(Qwc@uLkU-r#%mSK<(Mpdtvew7vxd(5GN*jo^4^EQLpta(9)w^nDmBB-*S zQ@w9?zL<|!IJLQkq1{!(=F2y;e9fK|EV;U|DEmBrIPuaGSch{y3p($uv21kxMqg%e zw0?0KsDUiDJ|>h6g&PTo4ig6^l56VC-&Kh>nsH?BYkCLg1wp6%(8R01G1heyduwqa z-Pd2S3DQ~McE%=7vyurs7u{v)7nJx_=)B77tRNl=mfll*B!Scnu}wyfZ2#lFRn#}jx+`g3ev zsqCCE`j@apa+?JlCGV~D%35xe?sBY>FNK#D?<4bhkSt_hHQJxx)m4v8%6b$f`)_TB zA?v|p4owBmy&ty4&sF=2Va6ji0R06D5i;i7E9x0|iWc4LFP7mGyx`3y9&AB-C+vLg zN~{sKwXkSCSy|~8@VIk|O0ftSXeLksy(FfIOghG&a{hEp^0VDYT;UBf1(8%=-m`ez zKA|Eayhl;N2J|J1E7jLdKj3fuhK~4E6-kjJ+I#fJ#C)xO3z7~@#GgCIzP((*2<_k+ z2n*gazsb*IgNz7|R(FMO9XByH4FbbR{RN6%CXh|Lw&^?Ka^qOeN6ZXi&xy2ThiMmQ z+vS<#UfXGFdHfJtct7rcf_3HOnFl_^X2!XE81CoP4E2Xj*>E^#yQYm4o$sf6A4|SH zad!h^+lw-odoHBzN0*njV&Cc{>Vnu2E2sV0GV-0}qY+abJFxJv2WXbr5QpvmAjTQ} z##z`9R}5-LbzIV0Kl?QypywgF;Jr@5(-O#_up&Ii+nsF>was+z(xynhI@v!>C1Y>-R}fBOZlSk&s}*2Y%zs2lYfk{p?r6Tg zpg=IEv77uI3x&HA5!+5@ywt^}whp0DyKg=~PuocH76!~MKT0s91^!(ABCpv^1aPqMSbK#@B61LO}I`jA^>e!wYs^8`>2%j zmu|>;Y&!ntpmK6YG1N@L&%tMnfv4aLi%H*QaC6ukWI8wk#X&5n@Ey5II 
zWYG=5a@MEn*tyqk^QpE1v>S+VpRhD+Z2^g^siqAoKQ4BIkdw|_DP$v_DFh~_b-)?^ z9=DgLfgDHHpU@V_L<8b*KWpbpMgq!>yPxxVuZf(xg0})*(3-3!`wzy`)l^kgLBIZ6 zUp^)tFA;{g;)+4EiwsK!hQu&=*Ix#AvBmKl4gen|xwXM3IMV`Ga4(lb6!axX>N!6W z;g3<`>k{+-&jKY8)uTP*OXJh3l^O8!54+llO~?RNbXY9d{0Fj6H^8bX8P>8hCwtJ* z1o*`T=qqR37V~AN8fs{2c}_JYHe$%V4loSdMAKB3!rpbbtwmWaVPRRyb-B!34(Vg%Pde zBx*GZO(t>m0+@zOZ!e6TkahNBB%pk9Z3{*GF({4LQ=K}Bo}*WV349mEB!yc}&W_FI z!P%bQ;BDsm%n_89&bLy+uJ%{44wtkO8Bl}N$P~Cwy%ott1;;;&j^XUe3OLVBFftgX zIvSOi`&U@Nq!30a3iPf>Ov<{)k16yjrx|6V=P*mPA{V|zAT?Txv3^LT6y67h)Ljf0 z+4&|>A%}wLgVoZ4;~o*^qPk;KR}il{y)l*nH4z`B@FQ6~SW7%gJGNIW3RT*D{}>gU zZ3|yJowi9h@P`}(liW`!E`*VWOPhi->i1Zl>#j@kE;>(;WgOc=G7VHLo|Z^fdYptq z$nqk&1fgTjq~zxNlWuQ?SkB&I4*{&sH%S;SO!y9@qe<+z_f$ zy1m!jLB2*y1fGWTEIjny-;*iM$G1L)!kw6O_sqSsoBy1XNIVY)e8T<+M@IRkJHJej z+UtpY;*@FWyIF+=nO`yzZ#EOlW+-3i#glxdwU{J>Z2;}i+ZzZ#e=fZn{2N}>;QjuM z34am#Go2^+B$}k&!|*e4#v+XyrZ|ZnNOz?tJ&w`k@Kt zGhfn$#z(o{BKkpMT2hoj4gxi((9l#%nw@?lItLeUV82|NNYms}T=a2FKV5r=@Gal+Vmfw^_LREr$7 zaGt*s{1ZwvdsYBN`u(+i{fB5jDZPxUi>#KZS;vB?S>d&dVwM?Nj{Kpt)AFVSj&S&x z&ej;vc4e(LT)QQ{tHo z{-u~v=qf7jZ1Cy2VtXr@!}Oh7P^BA&m&nG1SF?qi!ydI%JT7}n#=#5wRctM%l#jD$&(D(~ z7-!MXFz80EqD1m^#^8yk!Dy=>sI<=Dz}>f7&L+wNGx&>DDP_0c)at|dy)5|pRQxzZ zLQ=B9avXSjzK(}R$n}WLS1K@U67WPrtC$gVc6Jt1^xtyxXgwH6$5zUYQIfo*XCybe zyshoU!()s*9F~JV<230Wa3Ox`Wd8hnc*TPm`Ju=)iJ9}tZP6`xBnGO*(8t#(xyX z$rp2^in8vN;F2hRX3}JtwK~YxwQdn}a?KaBmj3-Gf-)5Cn1i`--$&|@0Yt}7)K>^m zfBxOm{>nHNPO~GfPoci(QCv~&m7yNh8U2Jbp$35~0c(d#$+Zf?`}I*P+VO3PHhD8= zkJgO5)c%?-AHgpg-M8I3uYIj2{HIEdEFrUCxnG8uFNV`+_bG2p7)5Xyb#O1MQ!Rr4 zSxXr;byse`Oi03K!gAkIB)pOw;3JP57sIqqjYxiWAuc5>kg!nkbo3_F94go+stsRrisb z@8sk1VTQzrYPXUb0>23(TPFvOEvgzk$5GF8`3dPfCx?He5JCJo9K^b!q-CCO!C7TI z+qW%xD?DZBOH)T-XkQS_w`%$MJe)r%pBaT=cLKM!BN*BOtT+oLFDNO_K9L#=Cn$ZP zHJ{r~)8!7tXUW-!(C}S0{V4Tb>AXO(*%$Ee#J$ZZzFL>_T)RO= zfMKuTlqA=0#Es}mVpBU6BfRF0lCwPe3-5Ac0fJ4rJzNBBEUv?4oT*tX`?l4NE1IybFd1hyj8#k&x5t6OzaqT48`GZJil&Lg}K`Ipd#A6;&U(H(nIZ z0=pa`i?+={+@z-iW4P~fxZ`fB@Hn+$*>GR|D}MT~+XzFO!PLV=k1KT1TkgJqWVt%E 
z0Wl2j&C}6mB~BpzbLURt-@c~9{F|g1=YXZ-#urNtU59#p!>-z#)-!(MKkE4QEJu@8 z8P!%^y4GW-q#Ckg?`Gez(>B0vEFH80>Eo9FN}5?bK%Q)f6{)*mGxKG|u1``F{{4@PQ&D@g7kCIb83-*Da0OG5ook$XW1SZsG4=@hOHdA#+?IuLB`1_B~oxi zc6o*iR^6y$g{b(=()F2|XReGp)2X7e#?z}hhpTu$rW-QQzWOyMk9+Am2zs)-K-H5r zzT(Kar0_$~ajjFR_E$w|_3jKwc%@)V-fN3*fw5{}4V6=zC&+X#pG}a!)`k>uLO=nH zc{)`8JNT5~bV0gaAMGq)p*qJN=Wm&K5#vptxp2uJZOJmrQ0Dl!&ah@h#-#*4PH4Sj z&w*ejJ zu&JfEkeX*iE+k?zrGT7dsgUV{*kjanTkUUu(b&J}xwP_C4teRT|8P}#(q%JY@v>ax z439}`p$30&M|}di@`5at%z0??&#L9VDAi{W5A5Uc#9H_}XQ0uyt@H6>(t=RaGedXz z@SBQEO=0O^v#{uS=0)1&?fr!`ZPe)Hydtu7$(W`}7v|Z6Mc9bP)8?tj&0mU#ME5ZG zS#}w)R_|~3Rg3AuZ~VWThcc=wAwCDOx5t8ElJJQj6GrwO6@JBpX!2z2Rdh27m6h8c zSgVIOfrMl_L)4=MGhlae9FE|nddTmKgOlMXV&S6^|vXL zr#VBAJL7{H+-D3h9@3oJ3Km>xCa9(@kMp6AryYs6DzEW4$~N97G{PVoUSgnln#v#`vQ?*SgHn-izS~^TbaqS;*QJ(EE*F_lpt9g>MMWsaU9JcSy}C zop!HFnE5*0u`aULgU%LAGjR0k8KqQX`jZJjn1yWyN!F2)8W@QoUEiqbM5Sy8xQoM$ zBj}$#4OECqIZ?1#R;C|+309hI z7(ME;qWr65_EyqH+d$^HqKX0ZQn4Km}$|567Et!UWY<}pH3zl;iey- zqoTdywugBeJU9ImN#_v8|DyJ-QW1gdmtB@FnCy*2FtdR1ePfxh4bO=MD=puMmlTf^ zHvdIz_ny7;i8K*i)Z)k0Gg~AFeB~)l*Ae%%!**-s5%aa+=F&+Q_qV}>6fy~S5T+&s z;~-o{iFPLyg`Ob2^bWyv^I~QC%2HTdextr5Ovqq{#cZi%^~4l4o+Cw1cN;kOt^v7g zQyFI5V^1kP5m?i`#mw&o4vaQz?W`O*fxZpstcad{J|lYPzE5~X=#=#jHKOVPCzzR7 zylmIiTVAEF@VpA1kzP~X$3J8G%De|nO8y?Ge9tolJv^}YxC%3!Xp?<@35c7N3^X0) zF@JfTN_aycTJ4C3OVJZtj|hmakrV+Z6y%vrCneT9sY()molhz10WTgig94TDg!E>J z0%8%!y^fuw>T-f~?+Ko!NrJ$kS*B*KDp$iZPW5j3C=kbTz zr%Z0OmWbYYgV|rmYU<#IelS`Sy({S|Qs(PX00c}PvpeBJKD9hA^+0)I>WG=c4AWH4 zL*|Kf+!JZtsaQYNzctyFt_`HBuZ;@Y<8|J<0K%D-d2c=FF0;EW4`%8UW_NG$!;H9~ zEff&VTYU4)a!Mt}1p>0*Jvwofvv7HupaHZB!-`_*zjLHb*Br}IsrEaX>rY+(Es(+) zR{%gZ6_H!$MxvW2;wGK)gY#nPbHA&FEtBzW+cUssAp_b~5v^u3UsfB5Jz@zRTj2+* zg|mzS@l1F=JSC~iv^A;jUV3acG24wqkL@cTeuu@ugRzaG!gl&9!^d1$e5>yBt{(hM zGxV-c!Wnv<`Zhep-Vpwb<&{9Sv%&3|5XVw`sRYGn2-q=X-ZFI2D#(~QSUC-`yAmme z5|g^!W9s{*hf8e{lM`HU-<3B8NZJ-sEC>?=Qq0AV4{HycM5nxypK(Gw{^ZJJx|}h-MsGUJ zjVH}-GcqnG<)IGeqASRu#-VwJPgSy1>Ra$@4w>B(*>|+PX6ki8xR_u((MT@B`lJEBCkw-8DpMO?J+;xxB*bPa-A`6i+F5<(O 
z-Fc5fzLWIBF~EDN?)O$4e#&PrDM;|T$1&?u!FW3RlodE}9~e^X1vBLH{_o&pAwMwD z+uNJZ^MbfkIVZ*>@a@Mke*i?N3VOKMjOqUWd$tc{(=(#w-$CmJ$dJF^+D4tYgDg=L z@N71MaQ{VE5bw=g)NkH_i4Pd0p`wmKr!5Y-29tD;vAJUWzov|@mPI#4{6E;-_|Dky z-Bc@ZlEzDt*hg{Uq7PIV5B!8ZfldD9#kuR2dopBWZt^U~A%mJNN&A-P?fguTk5qe% zNuRlbv;!-`gmJGrB;4f!OxG5O9i@Q$8_jJ zb*J4RmLr|Ggp@lQ_=)M+51-q61TUQ8^EqCItj_*-K+|iHoRpyVF;kMpB&5L9*UgAi3me=boo789`g|b&v#?^RKba!kg@BQ{k z#Sb}@PJsIgEY6vJc-K3L>HC2}>Ox)QO{JFF`w$U*g&N7Gm^dkPPd z_PB7wON)Ig6UZaUAncOy@c!mQ{ri~)ak~qw=cS2^k1aKpMc(dC{9Bugk1$J)ek}dR zmn7>!39Tel&>E*uw=WE2IR+jY{QtOm%b+#`u3NjfI|O&v;>DdpAvnb?xNC9O;8LJK zfnvqo-Q6966nAZL=j3_b@64R<{loB!z%aRY_FmUoE40?~?E~~+i1_?TdANyywl-i} z;!IBrm2p|6w^T{P`SpPXep5Jnjq%g&OhhT9*#p;uoiyA5Ey85HZ~H8LR-nt5;=*7h zh7n7dR^SK`_SeQht)U&KnI_>x#uCzV)70?AEl;h@KWk%_6Yu^SkEIXxKShy|4zY%l zjzyjQM^zuHDMQ0LHIh7SVfF~qq}?0{zBMmF*O%+@yskQ;u^6-#O+tXHXYJLxpS;SX z)Y~jGHaS_`jc9*jof-D`raQU59|4gYGicLVp5dEU%3d`k`EA+4SIk2(bs8b(*0y^b(uJ%q zO7dmRlFIJh&)gw9riPx@#n0ZYg5(wMhmu1Gv2t?u9F!g5v`(F{Lqid_i4D#| zXF~3SDy+uHJvxzrdYT?V>uM7kD%v5Y3`k0cFlnd9+g;5_WXJpvkV3*>gk&~1*1y=@YRhI2Qe90 z_8DD``=DhNzDUCg+HXQ+9R(a__h*);aUO&XT8w8K>Mbj^3iafGr6})}SMM>1ScO?r zYjhru9<>H@-l83-S#PUJce{qji^b9vrJd~aFThVBu`+3UUM7=zYqh>5!AhZDKiUQq zZRGW@R&zgmc)$4~BdJ+YA${MPWL$GvN=F4AN#h@bqHLR zm`=6DJmqWXPR3w^)SEik;tO^d)rOCPRHcj(5krfx=_weYQXt+34(wtHt#pJK-Elj? 
z)6y<3!7x7clHY=fgqC%?bKU1wSuyGKs-79b*OT9&61cn^d49iDj$Z(MMM5-4g4mGK@ zo$NudfBF2hB^>^&%5sAJD?~^rR)TAhKcH`$OE8hykgpf|u-oG(dK$i2dC9?yWep9e zDp`^ELG`alDYM4&(#RL)hz4fdT{C~)<3d8G=j#!#ddoVX<xVbdVyrAMhI>iEWba;%@uEYgZGTQIyIpzmG}h;R3z)^tS`qVb;VIvqpRzq z+@1?|7ib7%moj~!(vHlVP?7Q*u@Gr%g8P@ZOr?y~+ush&8J`o1oDWIN0fZDc-)|44 zI<)xMBRD$zioeMoH!R_sb_iozh+kaY;O+X17)Um)xO-R!QPf9(E^V1~52;!im|!+By> z4&6fQ|NhOm8VJ`DL(iD~!S2Z}rfVWutBuHqo*(;X-Fa$U9L4Pox$uh<*oL!D!GE`U zi4EFak$pc+4yS<5xg4!YEA+Vm?OIB`MXl1cjtVMgpn2(Q#NG8BkXM@vOTOon#L{dy zlTb7INjYm?cP(n09cx}^Tb2#6W*R`kiav9NV8N*O>HFz|8b9lj# z(nPO0B7Lb4rpTi##)e#XE&CK?bB_gOKZ+szf6<_`7DGAq4WXEkw|B6V-Pz2?ZarfI zfOYsy7BZG6zrl~N;!Bvn)uUj3X@LA*DDC2`r&#f89a#f0ARI?jthZ;1GT(Deh-keM zCgT1DpFE&EK}&o=YIx`}-=Uz*daYIvdaWD@c3DOko^ik7{bu`0u==8o7T4{K6xS7i zG}&DpTt(R#zPq^bxA1qst`nz=iHOpNA*q+^f;zZ8_h|2vV^d>MgkKB=xcC~Vw#JjD zRj5{6?re0>XUpx5F`Q|cJ~03GOk67EMP*-|9{cUn*Wer!do2&Y4lTITEdgk}_rSux zAL^@TrY)irUPr4de7-AlSbzgaMlnjmqjzLnhu(gEs_r!QLU9jAo}Po5e$aA% zNwSL^CyZ&rJaUj3$Xt*jWGV1?Vqi7Q+d{$#B@sTQBsMEK4x6nm7>j<*GTG0BMM_ei23A>^bk5)4Q5LzYlH*<20v}yAm1+ z7d##r1#b1EQ`IMtSF?;naDT!>Rm0_7SIfd|=SAZP=|2(iVwJ@-l?AN3;)qPu$SlphFK$d#>Ew-gdO0VHV!gtW`sYF?t(1>2R+b z=HUZmYiBHEr80?8zOM3=@bu+cUPHN?^e@x#i&n!fHY*)J$crZ;r9kS)uDfs8+WS_*V?xcv`qO3^3>%*CvK z22o}uBQ3_sh~%(k0-_&|J`_>Ofae3?V{@+bhTwm^JHZb9k5u+@KOjy@G~bdqRK#1Q zIHj>`E6=cGh}=J5i1qOCIwioTXO&ch{C0|T){7_;Y~KoFiA0)HTT3Bq@ZX{Mf4@AS zA-yU~>arSg5F!ogRU#@BhV+|;17QoLh9ql^ii0Jq_Y)!J3;#Tg_%pEpzM`1M#J#8* zYAz_9&38`|O7tlKZRmI3T_G_!ZqVCinJ#e3)Gi7O@EpXo#o^!kXYk|Zc)eZ0Tucgi z-I+J@^J{Tz_a?)vu&>@UkD^w`z5i?H(>1EvD8-XuMc6Cm9%$Lry)CJHhFVBFLLdW1 zkl~a09j^Ha*~p9lRecJTz8Tn9kGd*?Vb{lmH>3cMYHCu})qn{0B1^_6L>!%g8THC6 z5#O620|g74)DMurQ6-`9fS`l%=)6YYtcSMcKBi`5YnFB-$M|g&}IpE<3-V^*-K2zhOjrQe_^tJEMahy zAP4lpjD~E+g{;%j$!kZH%|X<_OH#R)an7+qz_H z{Dh4BQ+*rIra)7lPh=~CSXdnA{ezt`&H=+f5sQ03z_Y+FgN+z8u-4X5ym*c0IXtw) zka*a0Vz^c`#`)iokimL0x-bB%_o-F1#;Kn!OQ-{IIDOVr8fsVTwt^2l-9#;De+fy| zc-lG|D-~$`2Fp=n_1F3uJ?#8vTv#YtqNgK!iZ)bjb0BZf)|PU!VCGE&NV$V3Qs5(0 
zrh{(w%<;3+!$QpPG9Wh(yhWuW;{3#dU`86ZO(Hr6%GmTcQS7EN+K?YjeaYcag-w~( zAnQt0IpGeNE>(7&3PVS8=IJSl+hScU5&;`&-d} z;!+hYu%R+S*TK%jw%n7m>k-Po6z!K=zJ_Pa{!*GWAXTQBp`^250!Q0 zmHZ0-Z8UDx)Ykjm!;B*F7u`5eU2cF^w6}#J-DIk zg#7stX6v;V9^Tg~l7PnwmVnn6_J)^Qz*E3ixMEwaM?6kt1A(vqooPFpaD&zTrmLUx zF{6sTEwjH^6!(F+js;KUS#J^~xQ?ffi{F(i3Z6V~>&(oSf!Wk5)si#m?m;gUe(q7D zT)-O0sLU~Y2YsQlZw#nb*slk&AG~54o;oKleAb~1SD3>fU(xKH+tgxmozio{9I= znn{y2#3a~YaIouU0YCsA**n|sNsm%j)G##7ltmKm)hi|P7W^_(g1Xg2l5taE#wWo@ zSnrUreq5fS#$pYp5R+Y#nKCn02g<*hwdD^=GVkv@7+3gn){z3crd=LV{M)kZws+phdK1S3=#m7wsJlL^Gfvh9)Xg=&M)&e6OE16~Zu&(f5?n)mC`VFl|} z2dRz6C+|!C%o!E>3$!LcQ2pXYJ*os&HbhQVo|0_KIIq^muk&|X$z?@ywZXg^WR_Qi z^7X7RdfBKf;bE22yNT&Rwdy~nr*gosE+lif$V76^gj_JY6MNQ?^SepeBp6?e5J}PL zsDwok>u7MQQ5-Y@lH*mw@{UI|3dHOOjyt9o6h)5;NVUk0x`>`Q?JL**^d9J~V0s&8 z&d2qBF;3tI2vTV2l(T8LS^5bEPwk_Ei&s|Kz$rTEpbOcJEjMj05GzzDw8x1}-#E!b z4Wtw#lyaeEL$g89gG##iX+MI=M*n}X$bj)Dzss@l z$7c%`>5_SyJk1gBCmY;g(?<0v@xQ#zbOe+gHl&!z>mE(H< z)2|_Q`7@_~%$(VBz8gFUwB%Q*tG$JfjTs-oIK%UwUA#B?Ws6;Zy6-ck0mS1h@}&}( z*euiz+F1s<&~p~;cAn`O0QsGW=@3bA>4XMGlt#GQ)b^Mpi+aO>*=VTx%+awNVMPZ! 
zj;eI9(oCcv`~`#zZ2G7E_a$W+_?#el;Qkp0&BL4401~C#UV-S}?ha&|Bu3EO?;c0| zUeH9)vQ`jo^$$^6(da)c3>f-NeU559xf4OUnHc5D=xMsB%}Ju~3nmZ7e}eJk2zTFh z=SL>LNchB3DpTr2DCsk74RwY_Mt6aSu@ZK!9vfTS9fT>?R}zxJ2fJe#U$L#AR3!C> zZC?PbpTVcSre}hKk76<|lP6xv$Hf`z?zWQe^PcjT%Xpqe$^+Y}r!}tqII;S^n6KJ~ zG6@!hvu=YH=tiF8_4ncf+wHT+R@2f|>aK_;O9WC`J7CC`TO?Y0yzsK;&g$T;m$Mrd z2Zeq-lC~!`BVP(L+3fVi&cP#jOGhBW1^HT-2C0?F?Lw76i?`54-K{nzTW@HxUDDqh z+#NVas;xV&b%9ot;(NBfZFU}KlU>a-eQ|=d<@n*z&KQ^LhtJU3b<*KaD$w}`&9*v~ zB3rNfQ0!UJQ_6={<83cqK@0)NS=|ip#?@mtTA`>XR7kuHVkB-t&=)7!XN)${&lmYr zjSXP;AH7wx=;cN5>XPTxzI{Wz!i<-k%A3XJvACoh<&C!Cm9bYiE>3R3lA0(ILpEL` z=w)5qInF<55pTw+QhJB6V)P3ypcNkq&ckpqfq(@#Uue(BuW&rDUXa(GyA#-7Nlc_K zeRTeG^8fItiuXR>>^*B**@}k2QJkX%Oxs#WTuVxG5Ezc;D+b z8fzPInM#U?J$8-6Mqik^mCec<2RG~{S6=1GqsR=nuA#(>J&pjvKl%$pEijv)lfMl1 zZ*br2HA(;cT$UX^xSeg4v-_6W`Uo|X3Haga@xQ?#T->h$8= z0CL=JjwQ9%b{`C)WZJY7eCV4|s8=NJvo~|KK9E@ynW%k5RKU;?GX@1e^9)l!bkMoY zzXxJp550L@Yc4E&(I_ryYRju@X=#V-T}fEdnE;PV{cmHk=`HMA(T$qoe<=zI69${Y zc21XSaSO&8GNcd)Z#5CO(54%7Er3*iS`#exLVaOc2eRJGz8jcu20#lkzxu<8rPLn+ z+YPoR{B^rPXbU2)yqh`{tFKc|&W@RL)TlJe#`npHTxQf7tpjngY`d(Yl z=9h5BwA;t|Ab6rZp8&K4Vp)U#jeny%>w-`Xd3KVt7ytya0VVq@GtAdpbw?(AOkV8f z?7^EM=thbwwhJBbLv?+?bPQXaSlfU%DZQM`sa~$u_|)$PMyEOjzgGuPJrZm=vK3&% zWhPQ4@7eK-pAGs-XA?)SsM*JYHNW?-s@5D*SsVgfJYDhxFo>gY2o30dp6lPvmZ?3C4yIw2$jEjDmH|H2k~bt6V#SVtr&p##S|MA4^|EP65RJ&VnkhP#U*LRJZS_( zwrj^qOBUV0Ig%0mT{@AHGN_~FgNFepDxNge7gR8vAS#V3{xBj<1~W59B%F_{ZE>BA z>kAK^3=$O=1GcURxjR#iN-T8Ss$iyf_tQ*NA;2_q1q^@Ik}%`n=h){IzhH|;FDw<; zHeIjprsHg?&*8hN)chaE*PHuGAi_|W%Q+hV2J5zawDdfkz!N9Rb;6UGwQRO&87rUb zI60dLMWk86ckB%2&{gaEMP|$4|G`TB=SPHL2mUGX(^kGr%~nPNGW3t%%yVbJyL!u_ z+NQz^Ren6@4K~eOL>C?%GB5Tl)$&^Rz>GZ>WgXph|2G#!!7ccYcVWKyk`-I(Em97Vnb-fwSjf(plC z&8Vby{;}F4L{b&ADb^t4D`^ZG1q>-Y>Cez=<>+-X0RLyBp7PnM}SqYF4XY+w@ zi66vphDNw``A)s>CwdQm48S9aX2`xRcrb)%R3}sXGV2==C2N_9gq|orU#vLCOikwL zdM&1c`xRDr+)Ng%B+I{md(*K(IS3KIGMq(-v;f1IT&&gXmwRXw?pUMk?k4Zs!jVEvy}SQtT}&s1Ds-AjHI}k_68X}g9Sro} zj6HBVwGDb69F}Vzr9NClFi0BE?Z#iJS7XpC!!>-zrvn?wq-?r`i3>)R52tOPjHb4D 
znGz^p9XiF9}nEyp`a%rq86wzvVBF{}HRR$gm82L;xJYEe9uE94-Z zyCX%O&As>7KJR3(Rg{D0^J^|1@2+GULZre!68a9pL!JHB$PC20P@tv2J2iqwL(B$$ znqi8UIC>f{;VSO|H%gGLm_;MvY7Eak@gQ) z8uiqof^gsQG@z3U_X;ac?rvjNev=Qqv>kl)ZGLp^ndeq^uBj_>Z5=J?w2jK#LKvRI z0j_(H{hzZD-ukawQVx|z9H?12xW8YlcSDlWWI7kC*7vU*WV8s%AO^Afi}w_5T$&kP zKIzPM@&i07B32wW4{{+$k(&e7xPsA{9Z}TXoxyM29l=IqxZP14K*PX_Xbxa@U`5E3 zc>Vt7hD-v3Q-Np9&!gos0AfRFxTJ%f8E2^>Ir{luj?W%t32KHL{WI~moDFVp=*OA~ z$A+;-t3k*9=w_!89wjkJ=clHo#fa&P=l*PGx717MZ@ABlyhEw2+5f!MDB(Yn29@y9 zCbd|g&NWScJ}DxO)D=V|wE8w!5wL0Myo@yi&aPK!M@$#$z_E<83IA0|l5bY<^Ju*R zbM;es0!Qa+7yOXn0l93ZB;LPg70SHXPm41*zB#L}T7<7%rQykZn|>HX%W`x1#5Je9 z1mdH9)c;`}71dT?zU!ywa!Q)!tbYVSg=wDDuRW$s;^Z34uBhEW@ky9kEL`pA&>aQQ z`_kC+Yp1@QZbFzHg@5k6mDr8a*<>o6Y9ZVn*-+21#r4b241)^1BHjfK*a0AiN6u$3 znpSffyDzA2HMe;+&#caS1$qY3JuInf%Wd=)AX=(_O$Cq&H)VMaVDh`C)p;|NZ2}MX zmQ{Bw(sZZN#$&RX+|_>C(u!d67<|S6lAw6b0SGK5SVm1l5m^-=0Z@0;9c1fw!-D-^ zRlO6?xJOFetx++;X9pZ~J!bpQe84NI5({q;rQ2)ukG}}pw`)&8rq2*|JtBzU!U^mV zkk>@52Q*t7{}t8664td2&xD^v67y`#DY+9UYnb#SOkBkS3)5pVCI07}u70K7(XA^J zeMKn0TAaCu_*oV&9}}u-O6J(S_S%FHe@;Y5miaG_)m#mU0=BRUDma9dQTv}L8FZws zP_V@Qmjn-tSt<>sEkXOouQ>R12G-_}Gu*@}>PyO)b@MxkKZxS<{c!eVTbWo!OozYZ z?M>}yUP%4evyjUA6*Q4Tgn#h6*32|1l_bZHPbtX5%*0AmQPf9LfQz*i2{_&%ng+`dwG$Y)hsz!bMkT~+Y~$YE-xECOs5 zo9!?nL#t$pE>2@$n&_=zB5A9zHXgIa>D?hJ;VH-)iwt3-hnJ0lAtM?e{gMU1^k$LV z#QBchVMTC@LeFj-OMqZYy<+*8Al`lAE4-H#lL;(@Gp{eW zjf1Pec=Kq}&^RhOxo28t2JF#)y&YmMmJ0CdxFOywCt>Ly`2V8r-R5cR_`64sDgmcJ z7cB9pG9ppz8qp^TCP14~|8d^SgmigIrg%&&33GbRH?+dvMbEJ@G5tG(F|~e=t_Gbx z1hTTS1p5E8o#j5f!^YYeD6W`mG6K% znD>}*ZhF*)wlBozqXs?OU1!1lB%yXm%96G{tHr(EL;#j+>JQg`d_ydYMY^>5B}2@@ zdURqvBnqo?Y@J-`AER+Had=z`q*v^Ns%xEz3Unii@g}HHq8-EC2zE}2gZl4|t%z7N zzDiZ4+{mO-WJepOOLG(L(-5XPVpfFm3;NT#YDqm@E;DkdS~6;A{1`=%#81koiksmP z9I%8@%7=OeQpXr{dS&vl=*qtW3pFmtD?#q|OZZ z;Oz+S`*w86IX%_&CvsMaV~W1@tg0zF+nIjaQzAik8wNQ0F9ap0J7OpfrMZ-o7V;cwS z$VfbF?`3onwwwo+!H=+hH;cRx56*o?a=D+-TNkS*F1};$j}leHA$2AF-eEovCff4| zl_^dav}8m(Bjmg7ynOaKF)OpeI$4KyFIEiycDo=)fjm_cGlF6v*uOtNP%}S>tz9B> 
zQ0T-J=)d<&71GxDznPHf@emg`O%LmO#6KV2QSY4yimbMiHfwVX&-0-rwfNFs#XE+b z^zjo_U54!6QRrM;gg;&=6RlVkwzr}F$6_YRhbB8*TyO-@+@s8hT$eUW>`mdbvO zwY%g0%vTz+9kmW6ZeOg#@>rY?g4R~OZVj{NsQk2tI?n%+lZp4s=G>5&OMl93$iZFZ{tRwHbHRZsc z`DTk;48H|n5PAOcg=c?K8n`$156Te_M4JpKi?9^el0G%IE!h0T=hCG76|(U_iZleJ zBB>oWcp)p(h0D_axiJ>!LxnO7IM-oZBB=A$qn$OV7HP1$*R(7gYDB_AuM}xgUwG8? z;P2>v6xMkmV*-`I4|O?x!r>?)bfat9?q=Jp{YFXVjGJ3%XTGd(uL-P4XHQG{(R=Z) zZmJGn*$KryMeL{hh)+A75IA|6lV9(ACjDGB-GS4WMP1p=N; zZ;$hUS$xdj`4*utiJ_Tevi_7B>0@iec!!5TrL6!P34VJ;4e{3*X0*i2?4Z6E&0j*R zr%l`|>5$xz^HbLoXftjoN>iXsEn1exVp3j)EGMLo1k& zY`t+%ueqsNCoHmU0atb3SMI#z(duP z?$b+tj1T1(j*vDb58gS`DXGuT9gE%ip#8NJy-X0A8-T;a!03#Rf1H$9thRcDjZcpt ztq>%@hyNYFjOPVC{0lH@S~Za<_9aZBYF$7aGZqDQ9EB(b(!<6|qOc^EEuBFzxJpOp zg1t7JLq2_{hyvzjCt+c5w?*eYd&E~`xs(s2TOJ-D|p`{IdkNq#!|4=^sF5Z(q6q>R4s&@SreXq1l&_HZI*8{Fq4=nmRYyn+F_;&=b&yx~^s z&7&{Z15tZ|dfcoKIPdB<^VaP*3=vaOo;Ny^71BGVVLs^}nyh63jrfff@*Q3T6u^dv zb}r2A6dp>2(;(AB)wVGzE2P5G^Ohvdu+viYf!aswLUJ5eG}lR^qcaL3J_Tzxyu_Rv zWbfku)xBXF(Hqo!C&$qD@YEI`F9wQi;}D3iaH4bI#~sIWS))C6ND47)xQO;3bT6@yfFwsfwKQYL8*sa_Wvki zGeog?-iW}V;ALuBp6FL{)qF^yT*h+96|S&>7C~Kd4%I`i5xj^<82FRN&h|bzw33Ao zYEB!qL%H^>A?9lFy@ppl*NtFY#-zFVCo5vP{QfY+m9^f4Ev&8FSE=}neY3^8Lqkc$ z!wwj&oleZaj&DE~hc^%n8An>$895NIv3k@wGZq5K{ZQ$PN}YGHTohS{G{D?1IrV3^ zvi`GPBBQH#hr8Nk1$EUCH0ihdwY9<>Wa@)jx-eH2$8$x5vA*N+Bzk(mx?;siU-Z^u zpOZ%2{E<#<=oB~Qcb9u;t3uYDN04M)?GOBdCSn93!a979_jL>GpUe>7o?H0nVl$T3 zBJfC>MSM@79Q%kUC+8mnC*3_z6-;TSzZn5pFNX4u-jsGed_B<@?zOQtHqC81GldLq zM>aPyy_S%%v!u~_rL{z^&v>nMR_-Amb)kzjbrG9qzxTxab)4NkYXSb+JOJSIy9Lbz6asid{GFZF7yd zMU;%uz8`-$Az7+F?V08>O*EQ(nX>ii6g*Xrl+Io;h>{#pk52{;t^6*>EJ zynz~xs|*TfW+_8$nxQdQpyN9En5oM=7_7xD zlV$5qHdLcZ;SH+rt!xQ@2>tQcl9y4m#;&PG6{SHQxSDfh)_bk0dlt}&WEjB=Xk684 z3?$%t2(ZaX%p5_NjFZ$I)ngjbquZy%JRG}lkXDc*YnfGx^*7VlxRjbB1M0x>$h5M> z+OB}a)TF}Rhl~qRVP+C<&2|A=Ky-xw(e?aRqK4s4bi+Iupx*l{zl!AKpFvb85CUu| zJa$epYKvJ(+2TshuDmY%rCjBVTi>+O6ZwG)M*pjp3<6#JPGfkgO{z^^KXp0wDETyamNl&@WFYJS-oGVN0g8c#>`l#oU_QHVT)b{u=fU$sB#SY_oSvG z_s$j7{cChT&j<<}T=u`+Of 
z*rry&A$h^yp7v9SMYDjj6MgoT6GwX6#CpmQ_J8^YWtn8RT0s2~MBL`c9+w+&M@QCg z*V_M0#HuEFCoq|ia2?(#>SV@Y^#k_87_V1_XOgk=K!KZ0Cjw})oP&fIs?76VM33B)5_g%W&qrajydZ&X`KHm%Y<5ez7~ zS*B~>m~K+-Odw3g&%roqvk#S*>-x_bJfXB?yHV(%mXjcGW~>+m2QZayn0Q>vJ|9q*ps~{WuU+$)3OjEpFuE-iuPOU=sImIRpMp@$m->tV0H zXF*r9do=*Uwu+#cxWxb7UoH7@JV=s0ghm|pvl7EPCI;J+!O{L`q6L`x(*=&YR5i`R zs2V&%wwyVH99~=?lt=gw^qiju;VvM=j6DhReO;wFS^lW1Q%z!UsVV!I>6E({*WLRt zm5CjwiIn6JIP}WgU*kI}GGWj~VYZwz6vUU-TF`PBtw>b&CS-XAh5L}IOh(HHbVSUk ze$+Q%U1Wvs4KUUa$Ecny&568Fgq0gmx&uR&A~HyLo+k{qSpDAcCh|+hq6qtbj|3E$8L%{4gOl zt{>Jk>;)Bd9~Ltx0mtK+*O6i)EIIRV_hD66p#B8QK!=-<@L*zah1t#7#fl(28fU2<#8!>jH$I|ZA4xbJo5&K+g<&T8zXi}%SVXriKi7RfzAeEb{o z@aMqjep>PVH&Bl0<%~+L@5muU>@Sb~$r`qu?_&QLg?mkdz7pGsdNMQ;(xW13g}ls6 zuN}JhefT4WBW`DN)4VJM6jyF4zuU}JGb?=b>-OqGCgTt17ZRF0Xus;NwsP?rn|G3^TRlRF*t)v~Fi(O6A6dj^?5LUUy!U z^N4=I1W-#rS8ez1QWiy|9v_U$_;7!qhYA_V8vbs(|Cok~r`a-Z#o^&PNW?+MZE5NG zXtjhyh_5VOnSDcgI)EHnqIXCxYp3w#^;*vQ#gLEpwUmV9{ege=%?NGxC;z}?!s$bh zOJTp)8!se`1iJ4XhHRB#U1=5NR8?ra;NttTo)F-%S+1c#GfDER1-h`}pA0(TW5-*O zv7ZN?h^qqOV@3^MtaM%|qdwpsQ#xEXO1dpjNS=PLQ^!wB>Cu?bR3DqiQGZ2H=mlA1 za~38Pk)Q*#xlSh(@)}!IOL$zCYkDst)~#i`^pIlO)7`VCLHGF5m7n_doF%P1CG%%{ z&c4<9=DR(r`_9yV5XFM6w4lI@%MoWxb8#?#D1-=XN$G2MPBilnauvgUDr#`w*JqDdR-K7hI=F;F{u}kE~$__>YRzmcp-eM zA-{F%V^_l>KfexFX0C>OBCE0_ND=_9-I?&(i^esXVO|Le?^-#iayFk&gFUKeP$NGw zld*iu)4@^p|5csU=$JfNdonm5)X{ z#55x={B1dW2$ea{vYmE!S}Y_N-DP%~L4lN`g)BNhwa>a#=!4yLyc z0+}*~na|KEn&K$mgr>YQs0tgJKq!`Zz*uWTlR4+AKm_g$w^zs&VB{jof1om-`!r?x zN*#?R!O>CNXi6H>(m{*uCg1C* z9xojiWSNr?=g)EX0wm&2R!zYknoi9Iz^Lir&#knU6uFq%bE@y;KoyTzb|hMwJ!YgK z_z2n@(OZ~3c~=rVgChJTNf8Hk1Ku_835gsc4a+(ub+F1`wj-3jU!s!MBY1SR*ShS8 zo85)o5;M~?>FGqVE%q&*Z&Jz z{?|zGF#fp^C5qe5XI&Xm!9avTavAwGRgEjzl!%f6H0~6IpZy08*bn(a;>ZBqTbQP2)e@p>I|T3^N5o z!&cz4;Fw>$8ENT6Gw?6u%jLjIT~eboHy$SkqsdvhCounhAK)R1=tK-2^&J<*~mz$;1vs;)lcdfbr-DIZ^~QnU-Sma1f}4?}X+wE~tfG zc=O>Bs2(;P4a5YX5%slG{~f@7gXE6y2ezZkC7l&9^P5m)jL)T zSK~ZL0@QQEe*KXxF>OIOt_>+WNvmVPD4|KTEvT0^3;R5T8& 
zIFA?}U)|d1D{A4X=Bk>u4O5&C>kFc-SLel)FK{92e8wT3nS2Q~0m>wVZnOHv}6&~Cz zzE;8yIo@@ok43BP)c(Uv&g#|FfLQSKSgK0V6H1pe2ZeTdTc}I3PD-i;FaQCeSlnq| zTP`)dQBBy0mFPxk#ExmH*}_OdhSZ)^`ZuopOi6v#nzd#cx|xDx>)-oz%13rN%t$H( zDgZ>^SX3R16{ah8`C@my+pLo%F@!O!4KO{FKdS62-R9)*%r^XnOs<0N$E%VMMTO#0 z8Vx^W@RI6TP=g0kzW+373O_GM*>`pj4@*m}@rOV`2`+SNXj$r#ILoKp7ELXDz&ycm#Bx zoR%6!<3e)9jY~B>8OBSCX}c6b{gGg)kGEw{mkb_{ii7%7Z3Of~4HKY~A!INmUJt%G z%LHn%hF-Vf29`qLb(qh2leri%5xEv=iZthv$Z$1;Y|8l9=mjQZZbM?e%Zteap&Y7r zij)vFvY`D>^26zYqJJGtJl$;BDF~>{m-i z&`u(ng2jnF<6#0~N@K#9c>rzwkgIL7h!kf8uuIPjQX+(C!pMeTfKcPOf1idX9)5fr z`j&faX%p0Fq8E>OZ+0aoQ$k6~B>LIymLY^crK>& zQ%}XF$9C?~%&)wyF;-`RA#k3%IN#O7q#85Q=$S1-)k(hmuO#I^xtq)*s{6MskHYH7 zYQvl2WnW!OQsKcF_}rM$tRR=d`gD`pQS&XE662z521c&5(3p+ZCzINb2#v`by1oit zcZxaf4}udLgk(e-{-zrAoR;~_P{cYpqLMMG9fB0&|3SC^H@&q1Oxmb}L2LF2^|eLQ zWY6#F4?}yL9zUMRMmi3-k_&DG(J$0XvI?Po$wa#wDp@nRA1iRB?XlU#WVTi)J9~%* zD(qq1I^pEVkDmk|Z05jz*9^nbg%|C4PZjzr7TcZBX|dTBfZ&ZA9*?_HL$sc1Gqbe;q)VddMXHxy3d%ZHP-!CWN&nQ zU|zMediU6^YVUW*!(_r~6jLH+dPoJj{28VkbHbC?4!8ck4MxV_o8`2ld>k`i`dHiR z_?3o_ooLzdY+$ln7~AiiF61)1x8M})VmXaVrt^t7frCP(XOgGI#g;LI!azc1#6hGp zkObylh5F>!VnC!TS|9ug)E^t5BUUKf#6ccfpij8y_m1DER;W~#vD(yIsc~b!oo^u;6e5*EesRc^2T){S?JG63;XgmCY2AYY8AmvZI zlzbmV)C8c^9dstHiD8$nA=22aeHgI6h^vX~+-fdjy356X<~cY%vlT72_5thXafV2?ck3;xe|1TZ^xB`1cG;dG;-D{iN>D%g+au@ zcGM&9{JALO`IEnTP~&rQW{XuliG3>r(xAUy9dp!rFVKIDzbA2uGB2?G>1Z(@v5n80 z)G))RsU)Jy!M5g{Z?@XduxT0$J(c32U(R8N3NH+9-Q9Yfo0Wlp=Qoaq(Bmlu`)L8G zuy%qL-%K+3W?Xi?_H|qO#OG5+uB$mVsRjQZVXPS4djyBpw+kl%7xJ1pA{KQHD&qSO4F90z+Rf5gHC(P9s zJtW$|($G_k@?N9;%b?o-lAWvqs{6U=wsSvRvCrHH+F9q6pHJ|U*V4%b8?t4duQUhY zrC8_K-k*CO2$&df*a8e2;!Uo^Lk0t%`d7;qa_$Sf{C`b;J(F9O%ukEi*ujNlIi?#@ z7(p+kavDh1`E3IPjbQc*+@mwcl1a=Fcgv$n4`@~6k-JdIp6uCMCmEuKf@RqsMNEB# z1fa?q`{sFFtl{m_*olB5Ii?o=vq0C`9^T)^Gc|a8*ReoK^B2XSxf~D3uo5{7p9o; zG2Uf13V})&pqMXcm1WqLcj(Bl^C@JrMjo#e389ACIKoJRk@Hl_3E`1n=qGnMwrN5Z zy^m+qIxS?WQ*gO_X}z& z9IA;IWT!wl`^7}=ZsbFTTK_$FLNDP@xsp# z8|7g+sP`hV)$I?GnzWpDr-YTfB2HMsP4#C~sgKpei 
zg1fuByF+kycL?ro!7aGELvRl+fnNW)eXCFP%YK2{1y!)ensbivjRo92#nN!mM51wF z5O-vBQn2S1fR+ixWDmRBhe({+>J>&AeId;z7yyC&j`50$Iida@@2^cN__M)EdQk-_ zTxE@gHgk$vTtM7-2%SQV&)j<;U8VS6$GkB_A{O>;c4Mm)ZUflwDEJUA0a!1%3+*IW z!{h~S@W$rvG^|AhQqWYBy=u7I=c(Rw46WH}0UBr%cthv%4@NW%c)yjD&E`O~n8_2Dmq+@{Wc-hY za{KB0uG6P!=XztOTE?Yg-k19w^dA8evQp*O5s&xZ`-PR4ztF88>S7#X%KX(x$kL%a z!~bi23VWN5{K5F*#U1c@>ADUi8M@`Cb_&tx)oINXorH904Z4k6r28M0`VD8DEI|Fn z-fhy?&VJ6S5hM-h@@??)3Y7GWd(*~cZ7~+TuAUP{cRhijkwQ+C^hX8l#+d}8kReO5 zc&&MXph$vIYvcmsqKdd0avY$y4F2~LSF395jKt0ju+8cpAQ%uH<$FsBt$S}{r}En=QR zzWo@zxj15c)?16(piUe7e0Be00X9F&`n^B zKmIu3@OdG5Pp^etMavhwHNx}#yAM>u23Zdq$?CVFR=L{_=7w`7q!U6|`J;*C<&2gD z!{0o=Q|a>mLVI(c7B^hA)4b%-?`j`13DZtQ6zpZ?^7aJ#uX3P_6ZzokkiXs26%naA zJA$?Ai0o`k4RV_+6abx&OkVR4ZSwu-N_QuJAEPu)8lxdaAmyCd-A#hih0mG4zJNqZk4o-%P8msL0=(fa*DA{kEeXl(a^_?1e^w;VzGCV7N z1M&_gO2;0nzH;4P|r@R@9f9+x%KCsFqZf^jeOud0$ zf~P4k88(jb;>*q&la^oT|l?di=X{y3xJ!o5c#!d zo3UP-ToA8jzYKQz1{&q!8ymj8+WOhu7rM0hAp^Mu(PJw1xe8p~9yP>!OSNPM4asAP z7EtMg#wuqE?|WM8&;k^Mla)R4Mb;LW3QWN0V^#1~^}jt}F;8YuS2BRHG| zULJR~&l?5G`@=WY6G4QYUIkbBC3&j0$fd(WxA4pVFv2&VE*Wq*>94yWzhoJ^Mp3h7 zArt0)^R@asl!1fC5*udA1CEX34O6#Kg5mg67#N+CHpY&34l z9J9=WMx2fEZC~)j8DqS0lfhs1#ZFd1l-2xQTJT;*&?#uBU8UgVup@XW7fttl#*2=9 z(x;^iSm-H@;55(r-C%>4PIYOxWSW1Q<;=REb^f)r%0>nOj{-({+-? zRBcTu{P2i%G8gq{eih}JG!HGhWMLFcgl8fkz{`>_{8LS)9bGFc`zwP_DOL>uR(H~t z3Rssr>P{~jj*Mn@(g3~Xp9^jHro?2TU~nfi1E%!3yYb*68W<=o+V8P&qUOdFQ9~8L z{ZVANl&3Xc{9BmZw;wW@Fm+jon&kqZ@F4*qz*1R92Cx5PVBWRe%*ruwH8A802z!Yp z?dO0}iguzq4^e53CeHBOtcA)q3um-&;XFKWEoo*`+izwn&ev!_Pb9*wy?!O7EPJSn z@jsHd2T6C^FJd77-!W9*-!V>={J%&0#;BHJl>UhTNGP7lV4-Vl?@PXX+0>}?amVL* zWRZmV1eY^LEK7LuG2T&PKgO#sP2GvWf1ln4{iYao`o$#2UeeAe{Z-T`sg99l38Y>k z+3*$>&P@&13hsU_zD9t@8$Gn?Ry`gi$-&FWO`1WMwnF|W3e773I>LqNSFOqrYl%82 z{;hQZG2#nUF2f9{HgVCf!o&cml_e%5M5B#?HGid`>Ta43DCpOYzZytgTXDO)hPLa? 
zaQxo)eO=c*Z`6*mZgGMi&n7W=>|bG!iNr)iz~8+3B;Ni<;XaW^1i?pS6rk_iI)G>g z8hrhyJ%Kr~Z&d%e!{1BQGSAJpH%+%kTt>(easf%G*fltZNX0VE|Gh!|INJkattI)b znJ-<+&X_iADb`s9%mC{=d9_TVK(lV4&z9xJdswwNLs=f3SO$$DwT_S&)r=z z4JTUNZ5B`Iaj$Mek5d<}ibv*-z-|F9-AwDwdD3?@1dWJ5Q2(xUplm@#^wa`RPi*94 zF>U0sljdZrN>|ER))_X?L(o`Ja{(nQ8>TGH`wu|za=Ydo&vuS0nLd)KU5XWrN74tw zDEojK7bTvl%}IFJ(W8oNHB+=`TC2QX506XKQ&PDM6-`NQUV%=Ub$p#P#J(%H;s$el zWcr+00+-SEutOD{gWW9Dc3(rHT7tajzAF5J-`>zw+6X0FNrMMN)T5~Ge%N_A+Od5$ zIte&VaNL=~-{U)if0t3CVEaQDk*447F8k~9wm?70*M`vSV>RRM{qAswt%#-;W@+ZM zmBTOm>Cewsrwmo%@PT;bQAzUlZ#}~DmAn@5G8A!v+Iaj&Do(Q_wKqLa)RoM1;oCg? zN!wq(5j&pn5}z5PyEsD4Rzp5@dg5?8jVF;5)E}FZ-}yS+aGs_egrsN9`1pJAc>!8j zjSqFfoXoB}u3-~RVlHlw4K~N#WYCMJvlnNc?%H77T2MAM$No_3TD!Y1;lRII^4&+P z5dQX<*>DZVhZFBj{w|0}Bjm(nR2U^|f+C{rk)>EVN7uhNIr%Jv8fEl-OI9+DX3NsL zd94H%l9PiUj0X+3ZD5-J)I6$Rf6*;Bo{n658%@Of)nBd%aBbGoy;DC4?_EhT6XYWw z+;+?oXMMWu5S9R{5M#;Cg)MGME+Q%Fu`ws-o z85qyp%+#Bl9Ix{2qF0}9p;Y-S5B{}hZ)mqcq(4zbcUkWa*0QTKG|%Q_uCl|Nw7)|# zZ}o;z$E#0T#O+3gG_07pcj4R65yg7n&28~tH-RxG%& z9Nn@~B4r1dS)e=Fss>fQKbdX3wp2t5{P}J#oX~;S$?k{0mEAq{SQEtKhuI^Ex-QK@+>#T^Djj+#tJhja>oa&s+7x8qqR7WDkZVPuAR5hLd zgWRBU?R((%b2PGsxyWC}!nbhiOCc33hT#oBZoRIrCEkyplVi3Ss{0k@tl^HRm%SqV z1XLoJv|*jF_(8yo=p7S_j~c1WHIAiQzS}Xdy|z+mzpIAlmNAa}+eUd3tb?&w;#B%* z>lI$v)f1b{`BEjJb(=hc4Gy@~CC!c{i<I83Y%8GL4a6c?EIJ)H*+o7wg>zjMP3n0tWE zT_<)KeWrszQ|Qb`!yhu^1)N%itawHs{5>a4#WS~lTSXy;DO174CC4s?S(`f}YU!l`SnhR6c!bdI?OnMqA5F zgp_aEYD3VhP#0N*j_Xs3l|Sn>Wr)CB^R$*aWdoX)#GvFlnVb)PeTrFgbme- z{4E{#LtCluDtSv(ACI1gyH}F6i?dsMd@7k` ztR>-2ToJqBm}FW+*qQI=snhIHY5(N(x#0|82$chiqAMAPDk<=Xix?B9CpQ~V6EkGP zM3VYSAU;+{3+3*Nfvp&I1exJ{U^sajWSmQbb~dRuZ#Hj$3?RR)VBCMSId?GO4!nMBoO^k9G;M3j9e zA(1uY8b_7_fqgfZj=Z(_Dn!OB32ql{w6W@?asZx)NBk$7rKHJA$(Pv~O(f%oSFDMZ zHCIZeYruv6aO6ea+8xdBz~TgcmB+Ok?-5lU9?G1-eTroX`Yvejfueg$iqY^}wuL_w zwIL%ZVOgzjx}$)#i?)1BN_Lj(TMWT(VOz!HaOO%6KnP8OAr0R>gi*LSII!LxF9!kW z^z9gR{yPBFX|vb6zuLmrX|osq{vB4AQDb=S|45|6&<=!&G>HGa4}B8>eQ)+3Bs~{i zV4}{|4c>6G-N}fKrY{+a#k(?UChf6X)G7Wun*hfD9v?|R)`hR*$4G#xpLN@L;W`y3 
z4N(97N)QOlI97XXShztQFj%$kbn`}{0H{L47a|D zb+18M>1erdvg8QKB)kxI3=HUNTCfC=ld$+$55t?cQ_LQ+CiFS01QQm`gRN%4j*bVA zIjbPtevd~Xa;XrhY(y)2$f)`E1avRLNaT33qiL`qe(orm-oXeTcdS#?l|(d8qK1#M z-2fMC@Y<^%pCy;DJ@vRr)Y!l!jd((SVA3#X@jf$HoX;)M~8 z!8itVR3dNOF{gOPyLSn`o*oD&+Wv@64=i2{xxO`c9VL<}!+>~aQZ#XLu3a?7e9c(u zl}A$28)%X1T#40xtgta5<~wsV0#0Q zKd>1O#m$GrbtQH=+@bci3ca-^d0%fuUUvc2K>W9D=ppMj3}2b3H>xB)*|zR~P{nvi%cL zSc6HJyXl*UWdtc9zOki6>*fm}WzTT0ZEq;u+U#O=fbFx&{8$|=$Yf&$}M;<=G4qpAjjkW}YXVO~c8UUNm;7BT{E;c~EQbOWtkC+k_Z4=l0Ypu7GBh|5`K%1(WX!G?vL2g+MaBCQyHT&c0YWa_F{o{iu>r)e74q%h13wB4t z$q5V&GVS=l#k4$}k(i!oYOc=I{TH?fm@5w%t~K z=^0JFnE{1xxXe-CyAYWlocUPS ze46$e8cGW%IAK$1y{SCA3YZcim}+m#C}))po0UtpK@(0<$}tgrcW??@ZOw$ZUz?_; z(P#0tz|`eL*pYEZU{CcAE-Q#^38S>QeL#ot0?$-+_rso*vFfed{Y68H@{+cDW%;6W+={OysW-Hsbe{5}DAiP`f-S60 z@k2Qa0(;}iNy`vqO-g4vBSRS^UiQZ=G8Kv}LZzr6<&Ov*li?HPew^u#3G?nGy|$M#e5K$dG z5sf(Fr)Fy&L)j1%DP-_o!mj7u8R5kIssW(pe`!0|S|vaGro zjmv~b27>S*sYap5BU==%j4Ms(%>w}5MX3!ye$fCw;BqH&#!`v0u2hgM;7YobB$ITK zH6Z4QsyS0HLJ^0X5W-`f!P>E@=dMrh3?or4lo zKe|$9+t41Jsex^*$8ht2+-2 zN2*)g(lAqj@qFs9z-1MrhV9r{0z_kH3YsKL5^8?P{h#5pGU$d&C4FX7;+Zm$&OWH) zYt5@vlKS)Hy4#%i_(!h&n+>tdR=XM*pnJ(l`~_HeYPP|@xViP-?jE)}>&WqSc9KTI z2!HR$Nu|ADr8TAhXOS5kNrPT=(;#ZL*|N^J(uzB4ZJb< z({lj}F0kQ#XOp1c4o_S-Ht8?IZTB|fOd`7`Kk1jM&fI}1e)9`DyzAT_8zhhLbjP)b zpBxOX(osyo4EouJzuIIDjzp{<6*CEIS{8aRw{^hfVoJ$KtQUF`SoJ;9H^TC>Dgv<` z>Cnv!=>KqUtPTecw*j_K|9Y z{R4mGL%&eM!O-@A6BM=>QgND%Gr?A?g&E@@Z&hApVhdlo+`?Qs)rB+6*2tjJ1&yP6@I@YG(;CgAc z<(Btk``#|EF^k|g1P|L#`L=^!2gz(0Qph5~CiHVR7i_88a$e%<*Gw*QD8F zt$2k^%LwhW!5FiN^wU!rbP6<%1`1F_>8ORWWK_FA=%A;>cFs}vwWig1dMv9Ty+|0db#4AWOebfDgL>{Hb zu{`#TWJ2y>Rx<||0`NMrC~I0|5=GyqIR1&;_e3=776vQZg6tAUtdfFDC4-Ufz!8T6 zIx?1gZ^&segh{7@LJ?K-r4V_mQcMmgL& zw6o)+cwa^zusn1|V^zup?v{|zbbHRuwj^k)DFv)8Qdg$Vlx2WcqfA|;>W+dSGfwk>uGu_-vECBeF9!hx_jh^~s{89L zHW{1_L|L2;ag}OSA8-UJxd#%lczf}JAAL(z8vkEI?9gZK?NeX-yxC?MI0mw&4`H&0 z;85uVYCA5Yh<73qRaba#UnBF>JgDb3B&5vYDzfe_7l^($j^~|)aJ|>u9*+=KTe*`qn2{uW;gVR*xd(<&t5I3l&lGmdOcRH43_3K|m;3eBfp 
z{^sq0n=ff1xm^4jS0^}lx88E|Ydo9Ah-KLme~p;4Z-GkCQ1TcngRg)(Og1Z4;Lg41 zZC~Z2C&lpA>t4$rdN7T6Xc1GV-tt9xGLr*i;uvwHMTQp@%N-tj#%|=7)g`ELrc9Hk z4E}0od!`xrtu1aaHyU4wB1+7B(S%@|3!LjxzLkwi?1_*1 zG(-M8uEf{ni}&<&(bu9j;p8xAzNyoJ*oJL8)KWp`==4mWi^GRs1~Wadr?28vupmZo z#RW&*B{z6ltK#(Q0g}L)1@A38W@vc_+GGuT>qfFWEkmmq?r7OQj`F%?FbbL1=%EbI z!t=bz#Z}BYxb*Oj9a(hKorrniEuhk#!?|qX-jR?*i@CFa#Ll75YqmL zgR>Cd(NnL{qyt;c52x6jf%Z}d6J6F)EC&cYG1jK&;F^8zVYzU*Hu3evwWzG@x!3Y7qDKQ*r0_ zaAEE*IbCr+4R%{uixTnXOLNJQ>(qvu#o~nb=d&mfya7YgpJ1=8Z#WRVeo-GO&|T;@ z#pk0ZTaWy%R@%VYRmJbmE6!#q7<6-aynXDMv19rSv)zyb&f<&8_VTVgB6H_T8P3KN z9m4f1u7`vQo?T}iP_8F-d{y)Dhxtkt=|Fv_@NnjO|EX?Acp{&s=7Bn6@tu70xg7e! z;Ujv+NcU5ZFI#{Ji?F9*$3U?dA_Zuh zTBmciUsQ+cuD!UIm;o}gnWu07%y^U$%WtDuDK8$|+)fnl);fLia4tGH9)039XgYk$ zv8Yv{AU$NgE`9P)ed^Z4tSUmZ7nbiabt|bpTcDF z;S||=y<8BD7YM(bnhM4k0(n@mv)E8&ix512uF;s~3uktIF}q+bvZ~-Araa?IDRusHCwD$PXT2F*qn7ov*`(S1D*)Eor6Wu=)9USnH4rjhr8 zwvwiy7@JYd!1(baWMyk8hycZ6^o5+a2OMMcm6ZI*N_^eQ8SN7ni#p`L^6y~Dsvn?Pgg+vZ3Q#u8*K#52f= zPy8NTU}mL(<2OT&Y#=W;#*0;ALaC&1DlTF%7w&Qng61&oti=c^HFwM)^bck1krb6CE)qe6zTXLa&#C{r zOcYAsZ2;LxI$_gVOqoKyF<`qnnJpF^ zusF%D)tyld8`nk;9Tx2bIYA-1l`0pzOr|vFG5@>C0rqw!R$4_3H^?}?itJxA@_)Sd z+%$rrfNOexsA%>-^vgCz9QbPu>g~EcuS5EE5R~`&ZA1U0Q=}RfqtU#H4MZ{pWcJK@ z(-Wk}VbK=4yt9~&E6PuETZ;cc(GN3uuAfR{H;n(2W-7ed0W(&nB&ZD;r?3{C5xcxy zT!4(4_@cqR>-TZ?uRDB9{Wa(EEvH2fDYPmkw~oNuodGgx%u4o));^7BSbwZ-GK{wZ zZq9{%H5LT@H&9L(Uk0WWjdTZu)EVN**Ze*+X~|3-C9vo_Bi17lPI5?%xSGCwDF#p_ zObL{IAh3WMDY~s;Hd&p?SpCGGB;3LXUykihuF`S5ex=&%`1O@E{*e@P1nt~3kcj7k z9XT^iPJN1s1~I*Vdy3ia;!1RtuN}9Vd`_yx4<8n^wflbP<9W6y;+4wDP5-#)2~(-C zBPmww;8^X#N>9v9a8NoexuboDO?VAibwHB9&w~cjFJE>ka$4=<@|J zWX$h}1~U2Uoen52wpo!Uu#x&g3I&bC@_|cEynVZ)J!>41ZU_S%%)#E0UBcmTF*85j z@e}=Mn%cqbpMI+${;qHrA9cg)zf$ckrqm)#Iim-;yiNg^(0rOLm~BVZ{i$&9PF@!x zIm9f;Wj<2_x6{dvMd&KWv|OA1;E$~YvE6c#dqQ^&T=UKZNSr2ju@Z*I4x;ssT>_dn zMliBfOBl&Jub8acFNE;ttpia{yoTa~ahi8d8n(}!)Es_05IX+$q3B!wqO&=Vzjb$r zUv$Dr941I2I3C5sKGj)KPnKDihmehU=EyP^)|5#^07$~P&5@y4WOrUSOLHh`Yw) 
zed}L{j$YY_!e-X%b?Oj_vqM+Uo4$U;-R;>=_h}-(bz>hiFjO_HnBcZ9aKCBFe8~=K zNX{O%L^L^eA?`QZapGE~4Kv^Ev2nY|{+v4r9H^?Z;@Y3T&uF!NuZ2ZH+`eZenRCLFfoQDWAVivUhNbJ&tiXH_&m~&@4 z{DI;f(xS&> zr0OC<4DU0xUUROBeSG~}I?*HHl-h}P6?*ONQ75=-i+W+tH|*45TFXiXF&8`3EbFgx z${J)P@mF?(hWX$S7c}P7_AZp&o~ngyWw}HJ59L*iC~t^!6X0bfSEtq(C~3oX(B*!T zG7R49S+nb!QcT|{E$wT=er$EzCn={S#}sX1{P`(PE=ZS{5<;JK%rks=`0{&9PE=os z=W7(JqPAMZ*IF}Oz3+BOiIUd#`>N}dmOuZk!EG*Wi!p)x6xo)FN$7||DZ)k$F<)GJ zU5!IV+yXRDS+H8tyJj^tqM&(pI>$vEV@+%IboKOU&WJe|H40)!^!4#S75Ys%)yHkqqdllHLee$XJkLWs`@c3XSlB9#QT#Iir5fbHt&Zbtapkqby192~Pjlo8hv##%pNtuB6}V4~DLCF`LKbh0+4*%EYO%TpX$;C9;4t;<+Y za?!RZYWS0KtnWF1)7#q{g^(~-L*EOkr>BRR znb{MTz~$`BZljggPzl(@}8O^B?o5auwaB#W?#8`33^#-=yLH z_eHpWC;iT2BZsK7E4sJLPQ*p!9}P`yoym7yQdQn>m^5jIPJW^c+FAH>bG0b-8Lafu zXF>)J$K?~Ajs+UNJL5Z}E^DJ;ANF~B%u<2Icraa2xL8lD^hAHMhM(pJ0@I9`AdE2S zk1CWA!q)h!ol&ZUKxLKcOI+%aq@WcVdrB$xsGuv-0*_}fnyX^3u+a++dm@6ugSdhu z^%OTGpmByb78h$_wbDvQYm~$puM=QAhoDn*LFiiiGr9*T3d>#C6e&_r`}c z&!OV`I>P<_@Kkr8kgq?oBhVYMvBMXd-y49~yZK7!|9-d8{;<-Pi1R{-1}-Q$5Pwbb z*i9F~)dazq$@p!Olf*k4Mu6uR@hB4UVA^PY@MtLV zp7SX@m%%F?1K&bR9pe8MAS%{2eU*f1jTOQ zZFWP&{&WosHgIwL=8@?JJW=>79tVf#8;;j?*M*I2n5i!n&kNz4jt4n{{+};*uphT= z&lj&+zg#Q@d%7Jl1@(Q20(g8edv|=mc5HdW>wur=m@)h|auCedw40b%^J?&oJ}Y3w zn0T9QJ#=N+7Sr2h5b0O96|Y>|{vmr$d*g3h>ibD2!*!~Q>}D*O@6?M3PQn`m8uzx2 zc4-iKdKXdH>OQ+8-*|u3J0bSm=!GV@aC{F~hv?w+O?aq=na^{V*mv_0(`yvF(Q_Z!*flo{`a`>OPxs4@daWM*`trt~W*mPGZtKDv*t><`sbd8XxdKur#anQHJBwA*U62703=Kh*MhS^u0SggqOy{EKaq5#9yIoPYM>o*@_F z6&2o1lc~j>?S+$m6hxj){fZw{D=)(kmQQU!wcO>B)Pj*kx$tG#SHtc>Gt@_)x*>bK zbF6SL$EFN0h4ZzWz5A84rac>e7{-y;daW`n<$bt+atDS$O}}_0{thSjiWn zVu9B68C2#Ry&1-%roDZ8#AB10cyN<4en}&%R+KQhMZ+F-WF)G)JySlws&%@H-yK=p z*DkMtPwM(P0eoU8miyB_e8G5<5z5%RsWWW&2F1R@;tz|(%6!;+BgsxLteQt`#P5hz zwR=$)1R6BJoK*wU*RS&?X&CzG)vo(7QiOP2ys6Lg#fQRhrwJV<35jFw zHW!?(H>8{2o?nYRcp%_SveM#FTw9Dy_)k>v{=V44P_bP>4DYC)Ct~l)j;^bf5jk*Snv!xj?>EN~WPE$@<>MV@c6&Jj52&;mVt7b( z^-q%iyO5Zl`~6s&Ns}FuhHkp>u11`0a3@Tdo6cbGCxrvlkX|c)@(gcX&@f7omJ}6hRw`7}NQ<}_f#dx(QKD9slZ?_HTFTrjH->TY 
zrujB6qLhY5LEP;$U=neLZAS8P6cln`v`p}nE znUCJ;{TzWIX8@_n4)e&%O!Z|B*@Y}<2JQ=id7!s5tt&)~N{;S%4pCjH-XAI0^Mk?Y31PHQz)xQhHjx&lxZm$Df3v@Y7wAOEHRi{ zIO(bud>NJkHnOp%E6$J@EX*H*Uq8Bd`S}Tn{hvu6FV=$1CemWTO`ymAN9aB)`}^7p z8gW;8UmUoB^zpV3U76<6=$^7VZfwkqG7`=FVK`pZzH_TKp<|X+w1FGfENs^;0LrpfG;6& z_Z!8w&n1Tc+qU@T%LgKb|0hIv06lBy=X_3vg?4l+vsmUkS zyYn8-w$H0pj?WqAP1keYmm$Urnx1E!_Pl?HoACzWGsQ4_y@Qzk`sejaKHQHEdhz@{ zbPWL$-*Q$hBv?i(wvM>EHo&PpvUZ1vR4(oeq6u}Dcb`dpbUimA9OhoOUJlX!9F7)q zaiRvi+>8J6oX5`R_rcrgy&(_)pmEneSHsx;qc<%(2ha0&6apSRQSo+2`=Zf)&wakg z+ngJ*PqH2ddh@(LIT6|pmRi6(y*h~X^^d|=)E$Ox&fE4%TKqlTL#Y`jTLwwxL@)Aw zLGw~q5A;M|&i5g4Vs#x1KL~i52gBD^Z%1G=VC`;fRiXM$f0a_; zA-$y4_$hEmVd!*$Nf(mOD!NOH{{CgXS&SoMuoT+&pOBC>xi{kRQTFYNO_U&!Q5M z>EQGRlx&Uqrfoeistmt6#h;C0uJj?n2DZ zGh+tD`Dk&*)uDq0mSZ`Dbr9%H3(zENJzq2uZPcd*Guc2~*tIPJ^N_J?&pPdMPlim; zlchMhU>NxcCdf0Qp1rnuE&LRGY<=Nm0duSTyfzaWULx>N$nfgHmh+X#x@pRZVjiZ+(z9 zKOoLQc!m%~G{+jTY6dcqef3M6dF2i6w?jrW@&-S!BSP?${cxtb&jRfn;91WeAzBb={ z5?M&QJabtN<8K)eCt z2Qf>yY3RO-`-ziA{Z&>{O^Oxp?niBjmU;(g7MgH-IHaljjTvWDE3>JnBXO)twmFtM zyr#&uTv$m#?3PNoe;7?TicqeDUp2R_8y}Y@p^AMx_iU1wpBa(#j-Wc{ENVuZtAmJv z_+VL4l3u@$ws>?@%)*af5@|&fDK9K#$|7B)tcaHz(|AI$NKIW-Mu9GRzlNZ8Xk1U0 zof7>KMGqF`^KE`^%Bn0nX;wew`~KnroFyqcCPje&6V}pNV);0NxS0gkxB0;kpA0Y% zZ$xxzLVH3=zN5zXMT)rIaNneFhpz6Mg!6W!dsJ~sosm!!{sk$pYIxRR!yXA}e%Q1i z52zS9BNz~`pA>>GcWTqOLpBK0usXC=H4#nFq3XPD9T%)rgYZ1h+~YEY<>YDEno>3R z#BEN`t8l0FmBG*CxQ;T&VPP^qHr^PuQBWEwOhC^C?}e5@MKw^DmtS37Tj!iQhn^~Q+cH#? 
z?w&4_Z~#IYwdTw4F7XokJH|sp+0Ix=FRCIr%pl=}ANOz8Gh7d?EwL|tR9bCI@;K#^12m40&zdkqphadw& zbvOkF->!2;sU31gv6*rQ(iMpMa_Qp9-kv$we7Fl7mXs0XR{Cy2ZgFzB!1P|%EjiXA zHVL<%4=ATd|ftmc?ptks{k*=<%y<`T-+n_;zU(+WVffn9Cg=))fTc&N+ z?bsch%OTz{imV>HW(Wtn%`nbonB66Iotyoy#J}nn@tGJO`su1#cU#=RJndVM8JXNi z#xm1Zs~W(-(9=l`HcPWHtA}x?raVPf&{oXX4nocHJhv?A@>m~7PM0LOn%t4} zJ@TTjk?nV09+T2GwR@LxfP^$hsL>p=H;q5>A0td*)`< zovM|0YLsy$boGb}n+VI>k$EvB{Dt0AXC~;*oB<9GU&eWpFSk`8ytt6VcEzgCyatb6 zT`uTloVF;hfeymNrIu;(O|F=h;jV$cgM%ej>Wc~Dv(7oNM+N5**dl&(sbBH}1JC)c zd$4LRa487y4=i2>t*{>3Z=a9)CnT`t7t@=#AIx}>!I0n^hV2=x1PDcmfyr_(emPgr zG;-7QQf`QRgl@ZYW zGo=6XyGhWkV5Jtl@IDMM8bU=={^KVgs#2QemH39j=e{MlL6w&JuRN_|r=c%HxFK+u zVlE8}2g{{<|LVi2c`jLbj#CWz0QDZB;mtE-`JuS91zoQj;^h2t1ciUrDc_r-BB#ag z{-%n4M#$bCJnC<QQki8@yj`4Vjs%iXClt zfgp{+O{772V<^?2BFH1(pj`SZnt-$U@>$beesfBY6 zUS3NemJ@3D#fqkmMuM8SSSHBcyO5S{Tu4uxQTv?E(&yV>tNdCbGe5KWo0~7)nk@y* z^!SHtJ*Dd`Y>R0Np72UqNx|=vIEhDDDmohJf9AZ{i#i(c)G(nefBNxSzHjBaRwF^_ zRXJmnP#19VOa08M9h(yOQv%^hm(LWjEFLN6TXECKe3w)*W%)XQu85NtVXSV77FC?2 zh?*U^oMU>=N&!qd;#kn*T5BhC6GHKg@=h>ak?{MW+VJSJImFaLPImHal`P~+(-ozm z$r-C^N?+fBzDi4u5CdpUlu1KeaPurZ-n47`8Wt zLn)Kk^8Zdd2OO4dNs>&izG~)><@8*jm`m2N9kMObC$eO%->R;UOdvQrkaPSfWUM)O zMDVl9hRmS^-N$PDFt8uBH>vfSq^-s!atX-!q5Y%t;;LIFDX`_s?4pM)Bq7~CBMbVV zlIW8Hx!%)yHKgincYZ!D1Mu}pX~UdsqTpsRfDYcgIPM=h$dxlfKpV7oQr{H;+Y>uW zQfo0;YXUvPmxclsbI_3nqNM~$LyvqCOK50d|Kp!8)lK?8OU@Wn?@wFnYYA6($|QJe zrJeQAZGV4@LTusxD{=oLb_W5r|LGhNk(hx2G7m4NV)NgpS~tu6M$gzKqjIK}a3%v?BcvU?v#27nk ztn}s@PH%m|_X`)_YAt9c>oqGHu8c96)q{+I4eo*>Cn8_D++=-wxG?Bnb_D4y>>c<= zT=tZbFMmp3yTx;lBNnSER(-}Rumwot{^zRp1RJ#hqa^a}8n5U;rX>cKit{PHptuY_ zwpl4Lv62S&*FdR#G*MqM+v+6lKSgVg~+?lpN;w zTc+!S4mw^(=X5x50_?tFP52E0?EUBEDX#rqAmn*u+&g2OvuzPl6+U&l!5P1Myg|JG zfXuWz@1^i7z>nBo&<_C)=AWWv6Wmnvv7wM8Jb5xToaSaR2UMzyTcW8&#B5Xe?UoGF z?rSXP|HIZ<2DKT!T^?wG;uHz)?ocdvarfd5#WlsXIEA1s?(P=c3xq(6yL(z(ixw%H z|GvBX?#}Fo%rJb-ljpk6xqs(emj`n+R%5MO1J9#N9=(6KdK!YkR7;Wr`JS3qcVSe^ zXYR;S8EF@u=3naEu<+l*-+1K3m~D=s@@^hNUUy4mZ127e47z@B?72Ll%UySm3P#Rp 
zT*yPdvUa&LjDB9j2JNJ>m~T9w@>TQQ4Q}rB+xp0} zDYzcnzT^taaelfSeyH=akwr4MI8RzybQ`E~e!BRB>E6>Zj>W@y#e6xB#lv+spjs0I zvZ+Gno`j6Nr6N4ZZNm*A%oy-pw|1dwCrZ*VPtYY-@ROM$(3}7f1^3wYP&JsijjY@V z?AbPh+?s_pugpSe@F?hi)iPOguQBXoGkh)h1c>`QORcTBO#iD#Fhs;QQHqjpkOX}Z z4wuMH4SP5L8GigNWUk8}ce zc?lm~ydY!7=-nqbkO_VCJ^=ZJfPTb?9gbfPU*^0G);tys_VT8#g~9#xAN7C z!e|%fvd(1M>Zc3o&o9%RKIzfx3?)ivo{wldB|eq-KTcsNm+eJV`8T1*4aWjR5GT6|vHU!E3ByT!!pvVgsb~*FYKG zmkfvJ+jw}Iqic%#DqdxIv}I$lbOG{J1ur`H{8|zaOJ-iV4&{Xqo!&G-@3**1V+!6q zUtcuLYQC^AzQ070XsOH#9V(f${;6F%`$`G%(HcL@`5P#*XlPiAGQ|F)2Bp`Asqjo{ z=0M0zxEzn}w+}LSM9KRJ@~Hf*>{tbZcKE(I4ThqSH$P_T+6%O>PS34H4D@MWr!Z>O z5=p2%cCTsariTsI8X|+iSURYjlM`PQe4HXfN3g@4M?JO8(&h2S$pf@zZ%$9;G!+TE zjGd-84a4@1tOZi{ixT$Mhy@Q0%QJ|x4Fo=XSslI+gjLqC%MeqQRCOUmNKhS=1h0}; zzxJxDKd3R)Ka^z@SCZz++v*TB-DDB1|B9|C=C zcI1k>2}&Xpf^k@w@1~bOFgfviChW`(3LrNJ5~=Gv9F4zj@ot&M!g(nSJbZI~J)c8$^t=w}qS^&r7Il^x16>X;wK%45x2y4KbXq6$;%)p3m-g6#u2M8$|4 z++C$r1@m-$n5+5UY3JmFvs)-XRP3jG($4yt&~z_Aq`*ig1*HBfeaZSJR?#YxfOPI* z_s8pbMp#iZs~$$I=d?wBY{*0IYff%;Vz$aPu~KfBr)gt8-vGV%(q zRdi*v=yQh#SXD3;LQuqFU!{v$EBMD%?y{J4q|3_PWgN=adQL}-8YWa@ZU~psf5nWF z1~zFK4Ksyj!mfJ$S$T|mWaW)uCwDfa-s5Y#XI9jfcD$d7MizKw1E?so!yosKDS@8T zO3-1a8DU&^!ipo$o~tc2Ty9B$_0dbPBF`Q(4X3TW&IvjU|N0QgBmkYYy` zIqVGA=YO0sYK>cFTJfo(#`#M?KhpWoT(vFQ32+$Cj3&bCHLh>(thDC2_5xr!m`82M zK1V9anRA*oI$DV9hU^zH%i38|!|}q5F!!2Svvq##ORD=uq#^7$qo?QF#?X-nFIV7| z9m(mO!XsE&(Xri5DJ8tTWs2fs<@};xdQAg5-`v07#cSAJoeR9U58LUx3$UsEdq5gb{u7bmFUv8Gq#3{Ph_jcApY{;c!#bb?w2^0et4Jp-!Uyu zvLy%WZ}0Lc_B@=FbbJ~n3~Z~7Nc20te@%W+vtjUB#eTu=y2NUzj9ajeDVAkt-(VcI@h9*KW4$C1jD-xr97-oMH7FQ7C<)q>h-DCvNrV znpg-c^KihnnVVARsNZM9YuP;$_jp0q>U#*exSD``o(+wvgeWf6m<)RTubKVyvk82Zd zlBTtG&>6XmVYg7E%_R`sO%dLh_mM|?e#9L5xJSUbolAvtE6f-jVYKc#^fRA;Y%ObX zzcCR)K$ov;P0Dd?jwc!s6AOW?KyA0!Iyy-Q2ZC^Gg^!_|LCk84bDfjfk(vU-2`F%d zU@p0Wb+FGvpqmRq!&!*G=AsZh1$3|Rtxw^?ao1_6iK;5)Rh$AUac3UVSLsuK zF=JL{=MaO(Q$3JjpJq$6lBtruK7B|fQW8w)79&6Gx-D*KPuRxmFn3Y^xSDO{F25R-7YZb9S z;**1W4kklVH-AsbFOTx1v}tZL6?Qaa$$Q0;<15eHFm5oPwQQ+rTtBI-FqM{&xwt 
zAj!hzN@e_*oV*P4=q|q&WnOqsGVs@W0yB)3hZQ%f2m^(b1VaB+i-0(0I39SNT8i<0 z-5{N*ytm8|1>g%{DaxBSPX1E9&MNRru27PNymf`Iay(wnC;+IU3)M;;68v&b$FoEI z34TTwLmX)E-a0K1$Pg!eF~Rq>(8KtbI$!V`qMG#~>{wJ*J5+i>p*M$@13%QTOQzts zUyI#2r1`qPFj1E10lSm;CzYr)`;sPxf9WW$Hm zWbAXD%yP7R>0zKo52_JZWO39P8yV@?{{&KsFavrC&!JCn`TUIkb}12m{o9ZJSWONv z5(o_Jj&v+jr|XQ%p;u3n@jXE3ujU#*&eznaP!&V*E)9&BD2FUvEq>NL zfqCrQkwNQv?QtgZzuiIq+&&9LFe{s8ysc8RL9f%F{x@M`2~jbvKnzBb);qeQCJaZx zI#!2MX-5;OYOxrt8NM%FXNX_YolT`?*uP2kYu_bnheDiSGw%ZjHaQa;+)+|e#b@fv zw_9`~{~F$OU6F-6?uhjKS?3l9ZBmWgWq%=unKs{R7Z6|gS~j-fC%LYCiRi15XWu+c zMC*|6jD+r-cO5m(GDD272;auAlW*LGw*o)BGP$@%=4dQg9 z*{Q-aH{D}chYF|rZLi3?`Pubh<>&9mSL}<;_3-EC?T6d%b?#=N*86>`XV5szw0dd5 zSr!5@Jq>f|i-I$3&LahZqAU0$EwvuOZOtCco~Q8k3E!Pz1z$?zMzpm($DJW&@17$e zEQ?d4*c_Jlw?3Ny(2{A&Q!UAC{+;Y9)<30?Nj#8dAj65W@4EjW>SvMt$a77x?X{89 zi@zYTAisIEb86lZ=WgMhveRP^n=x|n!&XX?!|XioyrdDc{hegE=?Nq!b_16mGUhMT zw#?_3hx^%?%VmcgR_=81jydZc>0(%epqVU(tDo_^gN4uv)QKENC{zvIt^C15V3x@H z3stEBN|2q4D1WA+Q*4)GZ@)P08 z0k@40Kvx~YGiwd+3AP3@jpn(PgyG*Be}3CKM^73Hpsl(|L`>QM5_Apb)o)8dfx25E zZC8yjJE6Q7p(j@_2b$oH%;Wz2a~!b0N0T|W@LH6IkZ>rt3OND%(8wG5Ed{{2s2(A9 zb9o=akh|H8Ax7!nGY6To#l`(w2X)I}^4KiJ7)$#G@&C(6T0a}vnYU62UH<#T;<4#V ze%nTYDD_2J9Dd;4sEpFXs^~YhN0HS3yv^%2y(IJNGT7l;*yVRcCA{x%=@2OG1x-gB zO~3VK68J<43HCK7bD)`B5C-ixd?3(3vH?hWMgF4(@b~C-f?w#apKrQeYAd;3S?FzX zrZs0K)rhts4Y#~h`ehBBb|y=yt}-w1yB!@U*!v2jJp|4aRT67#7yaYLzU4UR!e%`t z@ttcu9QmQevzk+bphF_p)ikasIc;k zuJ_8)T}omb$~L>%Lv`2T3yY>n*0E(9m{-3eUavoMPKQLE zv5=D)vrgplfK5Z*!cjCp)v&51m);U&LYJp1jY594tCc}4ZSzJ}PoeY|yGGe@3S~I` z12*C;!fma$u`}+VOs36=fzMC6H$RJl5B6u`Wv;RWiMA@+1I01pI=T4}M@4ej34QtB zdrJg1Ir~J##A>I+iEM*5l&g~+){y%3Ve*wFCsNa>1ScC^_7WeXogt-+D;Xm{uN^A; zocf$lcE%BvptJ$!F+q_S+@N18_RJG`!lJ`)yAC6IWu&6_9tq388?g! 
zz9lX@Uy!L6zBOX+;M)KS$V}0$yeYz(yqQ^B10&kfa1kHwkcUH-2Sx3OPvHNJS^hV0 zNjLJcfnUxpB#wh)UD$xWVfM8q)0tS6t&*l6VaI1u4J}l%RMA2ZxF%0oft#D#duP=W zC9w%|tY|&9Y9`Bd*1C!|_5;(gXFd-A;)~I}E3WvAzwru+0ua90$k5SvVLxp_8nzJx z066qC5%`IW4mJ{1#QP=u&oLo1vb)_u3%-vh?CH5MC?ogSGQSrM6{8QaCpqGkA)|s2 zb{*CBq-9jkN)68PULdY_&TCqmn@OXS(io)eI7N^^q~KQ)q(=spg*D{Cd3p!Ou5u=x zd`_bTGg@iU*}K4xBQK%Lg@UB0@2t?c^<$d$IAT zhG+4U8{>#x>ao|no4?=T32SuiG*{ql!$(Xn>wfD*PUJ)rBwQY33gF7ZN31^1%3U8h zmJE0ZjwYQ*-*$f=DA;-=A}R2vFyVPdJIj!a82jCzWC1;1T>}0YF1XzDZNHc}|2~{S z(;MDy(H&`E5kTqQ>xVLxdkOm~VYj@~iJiFjfVe?PFr(%G1$EOUfFw{T_iVs$%9Bg8 z*l&-VpuT~C>Fex*Sk#h~P-3&6KXbKCOa;8b>cUCWO9&l~WLP0Dm=}`; zFzvX#C_h9@v}mZE>$2NC{`B(7M^SMuzt6#bvwS@w@~vseH+&`?aa0KcmA%~J%I4Qu z55^P4lJ@;h38F4b>Lrp)B4pJA6PbzjwYWL3!yv(%Wv0!ErC9S|N)5urv9R)+0k9UwTe&mhI+4!s+r_E5w0U2Js?z zpp$rFG~|2_@gT-~;yET#4L}0R=UHn=Nidcbglw}S&@UPXPG0>1qXpo>XBl?OF&0$fQdE#G;Lh<#H!()md4_OGK% zq0Q~hIM7$u(`OFveFLe`0!$8J#aZ?0QfGLo{Rn@jJ4(#>So>>6{5TPsRhW|M8yOAb zmeAo7A%uRyuOE$*L2K4?aH=oONPf3Ne|G3X;8-V?%|26B345<8MqUHf(#uX9?xJs* z4moX6*EWdr5!GfGTj^_kGG#UG9g zFybG(968gx+#r>ukZT{`=?CLfqK}2CL!liBpY;rk!uP5_2^i=XGYjK+|Bb4g;*oLH zM|PUM(#lGWV%M>?PWQmg7%ng36s01DkRi0$dw(_2QF>i%74}3m1#<`SrZahlD-EJ1 zB@_TUOhQSSs@jV*Mqanzt;3iqekg)dEGF_IW-9tNuTPq^_yiRAJsfvEw?LLssfH(O zZZ?Y*0$E1GE&c>*n^2SanXW5esN7TR@ zFPD$Bht%}4=Wjp{j`bFlB(Qp$RSjJqs*#Z7^CJ{H-VEBT}aDaK<>aJy4!LM#s)LhrrW@hF% z##LRP863{Kd1f^Y?vx%X`tJ=2GxK1GGgl{#0|k;OdYC!lq_BU%$~uhin4GT6;AN%| zJ)^kR9eq|3tQ8I$)Khz|JOb3@!qVog`ie=|1na)AR?_Ox*VI$lp1!I)pqBF{c?FlA z_r5NyFbE6F4?vFZ7OO6FDDkZSDPtQ#^;zwAt)fXv7V>Yrc zjJsO}PXzYZ5>sCpMK=dxgY53?PJArFkt$G^_nQHa;s%x#aB)hIMXWDw$M$}2{g>q) zBKZe$hXQfxS8$qv>3hWGStc5+36=E|L}vR^;{I$RH|B<1)BIl>-^^P6@(@g{4M14@`E%Hx8;iBcs{N7woESwQ{9r71s?fC!T>6TOX zdO&e21`SO?n61-in-p~O4g64pgoFFzClALeiQ5Tdv=@KG@>I~fe9svei&)0alp+r3 zJeP~Up^N^Xqb8%oKKK_&JME6V)R6do#B_zk#h9&q(B}cNEprbjOGV!0A8Ak1O$c&E zoBMDS5`*|zZ=7U9l;6A+;CAgBrM*?b9H&d`LR3X8+PZqV4c3Ia_qM?J0}AP*kpm`V zYOeBb{u!mb^oqe1tpovL z?&!J7gnFdm&H083wj|YvhhoX`WvlV+%*nA&XitdkGwBm^w^nBdfC 
zE`^gWAZRW{OrftnFbewyaoHpyt6#o6>S}Y#={#%74juEcL-X)x51WIKFj@xPG9V0eO#V5hM!top zyeeunAqUITVr7qPF=UG)jrZLIq)W%nbSKM_tRF3(8pv#H42Q; z+%9>d5J7XPzOv#K2asOL>XsIj-<)1so>6^}@#sp&j*AIc;}u^9_ti6dv9ZsrX~zth zGx1U%0m!LOijUW$+R{adelycCL`i49evy0Fj|Y~rG{J$`B8PmH4daS7!DQ@X%~}?D zF?-QE0iBg4Bf5SHUBXUvTFfim16VmhXRdxXFKXPL6E+C*Xi`~xGAun_pHAE`CD!TJk*W=x*;6&y$u$x1bB1!Bd-lT~+m zz50i6+ANiG-YD}+;^6BRlA#ho9>7HBwb92pRTThq_YE?b$pFOr2afO`|8XPyNBSJ5 z(967;>>(XYr+gI+o9LlK?Ve}uch-icNkFY?Wwp`msjza#mc+bv1 zl|wIN9VSCg%3&FmrN+=2MkYkfBu9kt&$6+`G-4`dgs^KYR(DDMw8mw-YL9rwAyzAp zI(k`nupfT#VYs?&wRX-+ca8?igEDj(Du+Z>};|Cu<^(`O+~F;#U+(opP-=9z2GX)$8{uR9Jzq}3jm7>__cWHgH*-JqaeEn8OR~Oc-@PC-e z|Mn%}FOOOw_7d+nn;dTMd`l|2sq(0}9NeABqhK~0umo%m3hHQ>Oiv#~;}cQ4kW7(- zf{9792Ww5KR?8b&j;1BcOC*1Wi>guS~%B|SNE5Wr6vZ@9=#sM+Y z^J3xA`HzD};+XNDQ4x z^}kd)=Rsf-i@xZk2A7e)W(vS=Zz}F{;gL>7F36TWlTRlPYzIk}^3Et`t_^$<{nvV( z-z;h{?P{jDEczb?oKL`wvWrDit}Gu-N0?Lk!pAr0X2?#-J*{u^n9y3{;vLXWQWW7?Y202cW}B*!3#NZdN0*Jbi)m~&9645 zVT}FWhXS=bC}8A+U2M*jX3`n#uB1$@|u!GTw)4CbHa1znPkr81T0Nq!R`1fJ3FxsKu|`Hm(84zRk5 z)B}ow2q)FPSzb0{ZCs3Yart@A>8~3eMvP(QdIYu6`Ap`-VXugmuGwgn~G4cASfnkaEacHs8xlnn?7yJEuu7* zYcXKi+6D%r!@>|*lkdx7MBHj=<#kg8XQZlj`)ohUY0wmSc*s8<_KkfH7>0z z@7qupJOG~9Etc3EO+yf%rXbc@F3CX5YbUDIz8O#%pFN zwkducf9%zYALPYj>}+fK#6vO*O_GL;>WR}1?EZUs1CUaZ-H^frA6gAG`VkRbJ-vEn zFnmfL`mAE9%Fl@mR@^!HEYtELYni!-ars0l@j6bqWei%FIkeWHCH2Y*8H~zn=T|EZ zP)7Y47^5(~A{E&u9lblBJY_EzJ_wbSI5>AxFmPt-T6N-G-b#FgK9iCi{09Kz@{Mbm zp14V4{Sil^VeMo8sd{rg9+B&rBM3)jh5P&5>8I@@T*X&htx=T?qa`GWfd{ITv-Rih zt^H7Brj0JI%Oq-CeSl}j#;($=WndEDW*0C}$qsO=AmhICYMad@pEfiKeP|LVvSp!X zV8bT+`Ii2a2PzfaC!U@fsgpO;1`P+mVdOXP-Nf{Lw1~J<`-0H`Tb#$(x`L#W&L^$3 z)P(9G&JV-(h4krkpe~QM8S>3JnMB{BzgB)ojhq{kF?^@aqzF=eJ)y{0M9Yb66vn6Y zoiMf~XQ!0+<7Oh!4ftxsit}(jd5r zPOG;tAGymsQ2j)#dYJ2PjxxxnQo&u3xYshve#fUtM$IC}&znALeg%0)z;Lh`Tln|^ zW#A|Mvw+xFCI`-&k$Ss-Z&5aE#3%g_DfAQdxZjTnBjX5zUu#SL!w;%egRX`S^LI;9 zdyjk(8@E+6UVZb?lL=Fr;gerGG<~>PMwCRqHk2sM=Dp4K`NRY812IlkGpalg5T|68&FuZ$5u(7c*n*RTW;{MCZ 
zeEA-ApJ*JUDfuNVjQaZ4r?5sfEb-Q^r5(b5kId=jrh{yQ0TA|Z-3`!CoY*$h`4$Sh zK>@UD)8tXr4VJBMEC^L6`a~fO)wZ?idwI1KH8-1$U-{}GmQAWCb0Nc3<{RN1khk7- zm;x=nB}L9iHyaFr7CAxtq>-+a=^BR3Uc_T(G=s7 z_p!xzQKiLgk{<=U=-c~<>s-|jj;mpzl_sW%vroW}Wt;NKR+L9$@3hjX<9;Q(jKzI-7yF^Et~tr@DS-&jj@beSpJ? z;DRTVimfM!&Br~2&KudS?57c+G+ z+8P0D3Fg0jtb=irkEKv4++U=@J8m{8?w|BTCpBzd1E-)G4Yte8wt{W`Tv6Q{l*?HY zj1$p0D=3t0f`j1t5Wf*eJaggAxQsB{wQI2j!Ri@cpLp2jzoHgy>_2mK-r*17Tc^pwN>beG_gzPEq4`dk zq`n^7JU1T6@%KOXeExMj=H}-~_6C0NKa`5OLcAjbS!0mEdH`-+UR;e&wjYZnb)RSl zu2h}?toKUjP4CH<=^E}geMfPDnq4~?&eZo%Cf}aGdGe`q!9mrSx{D`1^xU%J#xYk^ zaaX{=j0?A^61D!7vghlkkLdO7Ws*U)c5mLCxK>5f-8MJQ`6pJ6b*Ir)GSvy&XV%t| zj2{+s;_{vzyNeN;)Y^wT*Q#m{d$$pq&dLl;s7Uzs4d%qT?AsePjmxmhr~Q6L*j6`cY|nGy=Q`kS=lvkmI#sx6v=A-U;$?i_c#pGxC+2CM!2I=-{OBKARF`WK>|NmauJ zV;h%)MhhFPXm##DXWfD22nXf(rpcbBgrVn^B3Uewol+Zyz&QxYf z7*h%xE3T%3wjn{B%r`BBgB-pn&sajs`PQEf^rMF)YTm?3iLPuDTXwk4)Rn(7jgP1wlI0Y{LJjKc)5qW70i$BbZ>Qb6vC_N6_fO@>lGJ;z#s}o!}wX zG{Q`xv9C^eHH)AP>$DsLRJRrHFD%%JCf%>~~qOR|=#bvi%l0hNjLOyKG z6vkzKU5PXEcpThulXRk)LvyrpFBty>E8m^ZcyW3G+Xw)RY-suiZ-64g*Swd^MyEmAg*0|pg(f0|-EJcCuhv_- z<{}D+t$Ig}UgBC+)EhMpIV~TNS!ZH|pH9+xOJ7lj4qCuuOgAX$YLokTOnKm_U|_?;i<>KS(7CTL{`Ir_ z#S_eId9Q=AyLNB8_$DaGs*4mdW?v%XOfJfjFtU9QBBf<3*HB2zXAUtGC23eCgJ0(T zTlT-^eB$myo#L=~qjlo*n*uTEs8G0H&|>m~aNR@xNQPiMU`YMD#*lnG5bb;UWBYWy zFy{C8cjV&Nx9-bH`zd(Ew$yHe?{`=XjYI^C#^cTp4Z_^Oc>n?+-x)_X>A9KJ4*ECn z&F^qk`s8NgwZ+Ka!LXh+cf_Ep4@Gp{0<$RkLCwfMhP6Z9=p;Rk%{UF@4{(^?W(Q=P zN^`OaDzENoy4XCJw&LSA*bmrm~c#7G;4D&+I5ac-_%aU|@_&#@JOxEzD+)BU$& zMG}D}?ol=m(aTbSlD9m%Q24Nu->MPb_m#Fc7Br?YG@35M6V3V^!86pKySGE7@*q%> zq_yo{LLtB3ja2#T@^I|s$SW^bjnn1$XG!Pn^B$D5MevR_AI+IQ*0$qBHo7qKhtACg z>K@MP?E&5;MDz&wAza zrrTvPlBwISGW-L~e_t#;{3VK=rfj~CmT9TnvL)ZK9^#_;7o znzCl@sk4V>4FA|4D_nLfLE~Ji9`N`nN6MXt^lgFAs?7&eZBZ?HCoXom$C`b7$pH~P zPplj`aVn3IFt0f@})Hhq?|AS_AGlkm2Z!nKW_LuAKECPNcu@8}7@}{00g=si{wfwLY^vbg%q0Ny3%K z=@QF6gq38NV;(RSHK&-r7oAq+p}RpLWqA_=dA)Z=%a9cYN)2_!b|;(Mvm2)6XT!cp z>vTAGkumb;j{7sGrE4BqUFIf{Q 
z6+*i3>u#6?$`!QlI%*)~*a=_2d;Ow*K!>nDt<=^snF$}@-OBYorvtN%1>XMYX*MDd3w9?2Ehige&_J)hOq%@RJhWWI94Hq(rk*gcc zR8CmNfJjGmWwp}@M6V;JVDUUjx`$J8@%h;bXDz(8U2zr94+ z5>f|g)1_VKU6Ez=G!T&hcFXZ(AAvFfb*9S@?8Ftv@qR;;wjdsei7=C-+NbTrFc|h{ zCVaKIH>*6RIv-do^6hvz2duQW3^DAO3~M9ju|uQIR7SvQv&vU^*oG$p>HU1ro%gFO zZw|d&-+d)zMUSJASK_vFgFb3^8+L7_!V@vOxz7FubT^m-NpR|zfwC#gnO%%p# z+V)vfDI=;f^C*%9}E)3k**w~-Srb~@7lH0X9H1yhjGIyZh-!;pJ4_~WRR~bX4A$!4T3;cQP`nB1 z_5idOcB}GXf*&8dBK`Vu==iF-yI*ZcFlN64+94j;_%hMq6W;pzYDN$joP1tLptxQh zHU3d+P2|gJ{o8q;ZvlpQp_dI-d|+Mec3cz*s7v9l``BMGJ|DswN*KFDC@cahdq71k zJ#rD;GFu))3sI7q@OY49?~2i$UW`e7z?IVzpzCNvqx-w?=4Z8x1A>(GWPh7$k`T#` z8b^G>c}CC(tnI*sbfU8IOoSIhGV4twL|2crb#r4+D!-h9^gHx&)6tmm$ofht1KzlT{|MnkL?EX;md^_q&3^ZwnVQyXnE&w>wxksa|E->48 zh(mWT9STIrU?@Qe=!jT&|L@qXSW%ed()WQdlJC%{B}wSb ze*S>x67Ei~@Fe2Cb+g}|Mm%Z>;a!UHP!Cjm?+g{r_b1BG{JZ-Ub?W!0lc6Ihm`0<( zVCe8pRri8JgB;o`DZW~!$1%|jyUp1FF$P$Oes z>6pbT9j#+ycz8O~q_a|2sWTYtdE#N6PUfR-fiWx=tzJIYB-&=^NEbS%X#%QttVfuX`x7VY zdNE09i5}#t+v!%PF84YG`FjuaH!Xjws~QdK&;n(-5Y?736Qa?5h|#;riAr_)37Ss-P?&9K!3LU)hxS(C#v4=) zxSo9sDic!%#w*^VlQ!Ga!bjP>1Q+JEXz>0+wM@u_8tt&_FR)SfZ?m$d&%bE?2fao& zv|00@QNhr9a-Io5`*|Wjy^GEhVM7izG#n>ryqAnvdXd$t{Yvl$)3BDPPQTa+H(&Q~ zVBOGC8~~6C9n=v$;wGnE;@Eq@e)giCHDrppS!s_W8#tm%QU3GHkgjVSW<~K{e zisJHMOj&|b_;6_vt>{}eIxlJ~Uo9e%FWD_h$dM+VWCSBc>ZK(bg;lSUOg@Qld2=2) z6lAMaMT6vxfulMMTG`}@)(^RHUuCtENy8B`(5ykcE2NV>hL4Sdn>33N4VG&zDtMNU zbo~=Ms>F?fP6s0_>Ns6jsF`$o6QIjR<%Pe&fgQq>IYw}pS(WhGHNx?;&)sIp6ep$W zl0kT%&XFCm%OUVmI}DL&>rV^g6N@Kfy}C3a0H$yAMYtR;9{))#QzzO_ld{rzaT zPIKu+Gb2)Q6)d|H@fj)QE3HVV4Vm#3$T~drF*>f#&orf|9hOA!r+HT%$9=)ycBTMh zhqwHn&7d1bKh@7-|9S|e<}n#5xa;SP*K_5R-wr1(Q9m%dUeWLe*N_}Y<%kwJZAEa3 z=MUoac1}h`MfC<7d#HIz63gD+-oe>f>UE-s+0AGg$K|q3(hlPPvOYd-evAKCIp+Tv zgHKTWEmb_5Na@S>2&PzcdaLBU1xZB$bxyePAv<8w+V*sp?+k%&8ivETTI%Y`-sZXD z>1f?~wH7tRG^_hegSvR9wmX%N=b;U69r(Wk}Vgm5KI>3X8 zIjIR6JM0}usUq%B5pAgR?1tmJ0Y4Uc@3a+0rreICUF7-C_Vl)=*!=Hpbc8~FvMMO~ zFcn`pzqMHXQaBh8nbA&L0YSe^bA7{IEvptn_U`JFwr*BZ`mm0GaH{l-X+qnu$~=IP 
zf#=p;3G4CI*H_;ao-CvWCNvv%V(=#EFDDfpek92`Yrl)L{uh_C)fax3zn+BGt9CJ0 zou#yp6aC;wI3sK&9zBvIyOqjTnPnBatmk}@(8}d@?+P7wX4wh8cI?RYN9QK_%%Ou= zQt_93E&qXwyYDZti{}cS?c*iQ?HXa~5#e)l7)e7uR8f;=SWNRAs+#fY z;>1Ww+%trtJ{Pw?<@t8`tX%^befPI^ph}VIV!TWO2pK^O6gSwO;c^C9*3kL*HAL0i zaQdxGLG{}a2@6xsmN9Rp#4hrk>ShM>rt1NYk>T z&n1q3PVKcoU!i>hnBT=CmI@}S4;0=&si?@>X(2zYQ9a&Uh7YNEo@Dl+QGWyUgVxo6 zlJBt2lE!8G@0m~lG+A9K5AFTBSY{Wy+9>`xiaKmo> z`9U6YUb)S4Fq|oWzrZzupdNQrNwE%G8k%>*CP`U4!y){+8*Lbm8wS=HSC}WBCSXMg zA(b6;I`k7p|k(qlxS?S%P!M$8I!N7liBPcNF9fm+ytF#>ft?ssfv zuG5Xc`587j0{VtV6(guV&wkrBFB=L+c@5+o0*H0#UuU5%^9L;gTH~m24wvn^=A|l{ z7XgCtdFl?>LZ+TZ1;eFGp^)|{yJ z4Th{a(}^DjbWH!fUzr#%w|Kw~5+0nhLyk9awtU7EblC)AG5h#5hMb@R*_=tmP9>1h z*Yq08FPCRqh0XYhDF7Og`{A+o`sc{JZaFrgxhzwKCWrwjA^YA4IgZJIFG5{IYNGL8 z3OybHH-thQpoM>F`gdZ6f|Uuzuv5_wI|f1HZZ<#lIAf`0uf-p>5DO7$eaD*!E}A~{ zXVQQKw9%%jt}QiC!Tv9hLc=c)>0UG&5eyE;RPUGi8}X%RA7d1U8mj$9*;EzK0uPuX z%!XwL9mU0$nU09AuoSzCspHbgy;_97SR}C-AIcZ^5ec?3v2so*=m*rMhKmXkuwCk~ zThwD9qV~uXVya|q#C9Wynl3_|o8LdU)Z}%n@pT9zw2hO)^V^uQKW*LEI@FHK%7bv@ zuI985H7#*XrbK&;g}lNk)Ll^rh%!yb%C(&889BVI@@ULUXW07wpJSwdj%E7 z4qTTY&WbWz^rd&{g`k5F$t}PH9lO96FVj5Riub{NBCSyZ74v_ZuwuFpI^^{oL1ep2uPL?FI^Y z4j{mW&GRs7w8hV*0JXO4h^l@XJ?p&!A)dELX0^gxIX$50fs{8~v+AEdCs^oP*cItu zvoS>966$|I8bueHz>8BFriIAS@-V=NRX+@E6}j+B#!`YWoYKeM10i%1h|F3r68HB?-~fcz&iy4Zp9wW6abilO_Cc0Nhe@-fByd*xydCaT|* z{A}up@~RGKF$WT)Gq+&oNG4f+jRPKhWs!mH2=n*#D{08~G^iG}DqIW9;1de~!IfeK zMUNw)WFDzM`Dp#!iwoHTUV`#~bM|AU4KLiPl8*0mBtPskm7)9P-^U^1O)-HtC86L1 zt1Y1(?mkPHlsCV+TUMA`;4f~&G#z#Iouo~{aN~wLyJD2_ro0d*h7z>x?Ld1C&y#3a zp=U^vUFOKk0)EMO`^+O|75FNcL6ny>;fX6E+6pF{`91jsx0gU}ISz$NM3Nm9lkxB| z4L)foP#?&Nk$9O>t4sPvYXlpwCo-wP=p*=5=9Yj-z+If-0gEv_?zy|Tu8yFgp~3g4 zrj?M8FjU-Q_kmU{PdY%P#bqs~bt9Pi{QSHq{{PTjh{8gSx$Rt)JC{B&4~b%Xnhf)Kx5}_S?6Nm1R_Ky}T-1?Btyt zF&$R?KigMQQW0?G4qs~{IKzzLgCLK0%_TE;LnV2P0D~IO^dWbV&xGC(7J_uG0@e99 z9kxYuzUF)188sFrAzKz3~WTF?bUBor7|EsXM5L9zj3cNk!e5g%MW1L8t z*Oj~n^R)?Y$w?!gLnHFnNB(S>D)s-lo-t0wP_m-@yA#|uK@QoR!apMX_KMtVuDr;0 
z+gH2X*L?Opo9AWHHwS4K824sS;?BeQ(W1xQTi@Ie`u{-D7bIrb1`Vgpb+G-?y7S}C z{7r(8JMe#&4}xcGix7Y6kjGy*E>Hh3PJ*wfp6+G@Z_dNZ&#sPa&vt*)`refg0QK_N zMir@s0bA)+CtC|#1j5TGVn9;E%`>07rM8z0t z-LZ6PH2P@e0{8uiAaRvA?RCsX0Zv6NAolr@Q{rb^wQD4<|E^f6OPsDlnOLz4Tu?XO z-XR9JqsbW_A3D6(%mst2(Ug~GVCCkzgn0-$ySbM|x`8`2jgg{Yf?JoP8f~z(Z56QP zRK#yF0c+GryUeX0#sRC)-LO3a-z|FN*`6d!XS9rNY{)Xc;F zTwaQ~76w)j%o^2u`+>0!$Tj$YmfiVS?Po-1C681a0ZGtewUxQlJ0weTlEXPYt@-n2 z3lT<(L%tF@@oi6h2#udn-%!gv^9anc#TR#*2v<~le&(zj((*LXUr(J_kXzv$ zTHzdv&iL+MSo_UTEZHC1*J9q|<^0PUwv|Nf@fHN!*e#-#QT50ptg z_8tKQdESfn4GFX}%R)9m|10ykriTrthJ>S*Z;K#?y!VtAciM@KXaL4JH#DWo>56Ex zNDE<*yQ~!%&L2PK&DeDy*dUk}t;s1|@Bdw|+PEq{75-ChOJ)9Ev%nk9i7Q-e6g@(C5sGiHhNBU1U>?p%8-D+vgEA zah2!t4?`TbjfC7^m}z%p?(E7Z0otbLRbh4+F&~{AeP_I?@qRvi)pZmmpTMFfqBB2j z$zq(~8DEFBn{8gMkG;sqvGC1&Y)3x}`uEX^Xcc`71YQDRub3ch9jA`OnxReBsLv zadtrSy*Q4p%S(7@|IkUv)qvXY*1(YLwy74hAY58e!y{)MJ$tTUWogS1-tr608s~73 z@3lQV+>W-qJVDV#Ukb-PB`J%PW0P;-)fQc;H;8jdI!l@3yN0ro;Y&3uU9<;oe04>B z1&dOTwv55aEqO+4VnGlOR@sv(0HG`Y!~;s%#g*SqR^X*SXk|)v=QIx8F44Ayi3zwU zy)Y7st;7S;n*h56Qyg8qkpDGKSJeEp(P(0KY|LlEd28h&aaIf*K-chJ?vvS*l%+U# zUipIiHrj4d{Z)_)%HjUSmx%-wFMXOHf>zpjM6t6?Y@{=`x*5a?C=kR%y%;0K5;J_r z9wzsi3`sbn7Ps8OhCw~1opn~baRB=J1n6eoUf5!d@ul@u{(7(;sYi>8Y}Bvb`6EXx z^pbzndW5I5SLuh1H~xhP#*Ze`l8}V9FB%7dQM;~8d1V~tTsi$Eq<7Ur{C-Zr5A*e1 z_N@NGnA^DHwa}~DLn?W(1w1t0_wX|NQpepu;lrRFO4AE|=8qFQ7E$~gT2(^FRr9Nq zs#_>jtaE;9?3J$3mkTpnK7=HnCB3PG;jr(!01*~9EaO1XPH zc(#-1la;b0KQGS@j!~fxA{|TTeeRSPF*Lj+-rt4q<%iJ8$;n+3)OcGv{?F3GPKas6 zD}L&#JHGji$i5zt%j>41)^hZu*TTqqqN=Wn`!=|ndvv- z9Uc6=ZCPc_!WC7nuih#MR%NfeDc+ZNmh6m+dV`Rb??_yxi|dG`Bu2PpIb?}ku94Tb zMb7ghop+0-r@U{ydc04;RtvQx?u@yI!)Jtj>75_1XR6UAa)LX;mVbUa{LFDmC$APH z*{^=+Cdfn|5~tQ{`weYpE??o?BM|=u?iV6my{NM;s)rQq82Z^5B16N(4?kT z7zE=cvi3yamann2&vsq^o`P)-@A;k#Y;0Y)%Jyl=KI6}5e`8=g#$3Jq_x9nek7c#g ze$&TwbKQV>h{>aec&JX*HJ=|+9#K78a6I#G3!aRQrhsMh=s$9zCENzdj%c2DUie2|ziSx8o zJBPD2alzhg5bt?uoZkpBrSL<(WNv~Es?#VZ=UN2))5L1NCs<;W=4U#^c^uk^sT^tT zXIorfdb#B{A$ccK^yUrLFP#v<7=97-nC00=(KoY19P!!Ze9sYSB~+;S=kLL 
z&a^EWCitQbr+gM7$K{m0K6!L>!t?#GF9!fIIvm%f(QVQS8RE;;XLT~xFk&s8v47~0qX@Dz+{ zGf_J6=Ik=px&_e+gpI`CAz6uf<7OZA4?#aI&+;qb{eIV!AdDK^(ik!38~ev8`Nhi8 z8@$PEAw>>H@=pi#D?*h2;JRo9S53h656^BR^QZcOP)?X<&G{js29z;7HyUsTp}|KaW(^i-7+FI^MikH zb@@WlsK9mn5w|_IEI>g*VbLW71RnMn6!$Z7m=PZjWi?435vkfKZ`Yj>#)-3KIQoI@ zIkypQr2u@eL^2u=Js|zMoHI5`pOeQeki^nfv$DWtisLVlJw0O>o;Hjl zG*4FC!c1b_t~xsLQ6A#oRB`m=%f>=Z`cl}m3UdZb8(vHQ;XRoO_6r0wrTXh)`eYcsCe;j zx5*v5O=|3FcvZNiKVMwTzA?^-CYFG>D06)kyW*t{?=!U2s1IHCJAl28B_I#wi*%eV zQ+&ZGtvt^Ql*1~cw?;MMi6*rnxH*uqB~|TFtr#UgH7Yf$ImJ6WcsJJwkn0RSG&gyVoss6L&sM@iGDo@X)w zYA$W4bO5U;uYxy4xPIYlo-(FQs@!SIQRH4oGhz&DqfkVBlx50f_pI~S@HF;b_K0zi z;iYo~U5(h6S}iHjP%4gK5%XBVdyjb%5Nf}F`TO)hprQKzFiCx0 zXUwen>=iRomJhHqeQ-NEvS+^AS5SgmRqHdhc+1nlS22;py_Vn_M~7cm#CkR=?glEj zcO5Hy^N8pzbCC=>>Qd?))VCJ4Sm|HXIJ8cH>uq=>78g1oZ$plcvUgdDtr5qi>h zPhs9jS-uOdEXPD++nr5$97wUKvf7j(WhUdY@%c&^ZkeU;F|nOT^o#S3(9}45W29_* z!07tyd`k5ER?FvXhN_2U`(FF^XInY2?HY?Z!8>~szN z;LHOI-mKB_{hWGX1`0GD&_t_*BUrz)Af1os5Q~ZEvys|4F&qPs6U;A-8=@uJ7hRd{rZz!gTb7NZaz;t(k@GTlTBr}!JRv4CgvF|AE3DzB zk3bpm(yUjvssVOpJ2t($L_x_h%lFB*z@f>HuVEZNH~;T@JHD@fWp>4A|}5siJGPDaHEv~b;qphl{e29KYwsZZekXF|Ey z^kd>@{Q2ngjqlF*GLdZ+Y6+@YHhIgn{(a3uXC-ll))B7DoVZ_P5AOWj{keHss%@-do!XZ3X~S~?<`IeL>F^}hXhM1Is4 zGwjqVhX;WlHo0)|+1S==z1ss^F)c`sEX10tfVy?=t6_KeOuu)C$nFwF9T^$LtA zF9)}>^Y#pfKJU5r(7JpOro*1n$i`t?s1Q9ezK4C>a)0Epn$`PJN81XFBS6*1Hy_q` z4uaOrg`WRa<$K5fZk|u1l%KLL1IxpkqV6~8yskyFOIGN3OLkem4HNAM)MTD(mvj+1 z!J}cqFZs1=x5do+YBMqHJkMOZwXcd!f~Nd93@@+ls}5XFx62+Lz71P`E=B7Nf^w(;HQ=&~awX&ldNf|u?PuTniZ2<=VHPQ0>oDsn z_KO|A6Ab<9k!$k4F03pz(g03$`n9H-#4?k4H@A#h(PyeSy1$@U@yOBMR|NhC=!LDZ zrlkG=HL9%7^fofJDENnXoNa#V-!hK+1xUFjHIbtD{C>7=|DjPVQY=V06EuF%_$g;l?sY64>4_UeHg|keG)0aFah=NG=)Nl5 zU@GQe|07h4Jxf$??FwGK|(TwawN0)ZHv)C*2Y6 z!XMrUR5wDWs`;lg7+rsHBR9OY(h0M}HlBN0O{d_@Cy*>F^*lUp;u?Hqgzlv}1B$gj zCfaIMbl9&qH}z3@;QNqDF;Q8E3UqQt_)vFf?ldSU$}47l=7i!;RAHKSab3IvW)QiF zP%$133T%E)`oSnHg9bg;%<^kON^g~t+JqQ$^YM#TBCtORtby^;eFLanhRkf*nt6B# zx~z9av*rg9x3sjp(bWyWq7i&;`Yq42M=Pgy>Jb^iX<*fm6L7QBK=1jy%0vNb};<7f3 
zzPj}}?0aNP2_1YSDETl1N>I&7>uCIfH^CiWL4LALHUCq;*suaa*z2mo(LpZAQS#7G zGNQAstoxJ^3P5)j9#Em<)9FLavH#WR^Xu@aCqE}_GtTyoCb9L6uB>2)p$j(`KZ@fC zcg7r5&_kR}W(4E@o>+XIh2gGXCeKVq)<3uhQd5PCKA(|yUH!>FyESv!(&~e?rG8Ev z{XmrrJdplVud!FKlM{BaO=s$9qEiySva_p_Yc0ZXe*3OsB3544kIL}KW5zH&ympxQ zQYgK+mVqeKVuRt}Bx*GA2|u;atnjABqhje4?xL=6Ih7gRSEx_^E_W{K)^SNjGZ}<)|LL^%X*AYN!k4xdv#$So^zE@& z5N)Dq7d+fnYil*N_~v0?@9YBMx-&_?t}s7QNA!f4A`IH2UcEh1@qLabX@hxj^5p*; z?4wo2ax?jmo5y)xvEfg1X_ojX^QC^wgWcPgYXBedTWqrfpM16YEa!`)spr38kI6jW zEYE=@OjrNYe$4f#vz8&5uKHp6DX#Ou1xoxjp*0+>{iV%AW6WgtJK!Qolixv9UQWFe zm&OkHM8`^Nuo1_kiwArE<7MF|s%)4tLZf#(*Wt^S1dPsU_+Y>xr#5t$XJ@xRUTV!$Pms@lN5f9RvhVB==bI|7$%uT3X0TF7z#S~a@Q1?5?@ z36ktRx*Pvo_-&Rw-#Zzyf7wl&_I+pRrJN4riC#>SI`sI|LKWW*&fZ_wW@7v)MhA$T z-&LJ{^g`XK?hTIyHe7(P%y`LTTMJ)j+|^dSHfD>Xc)ZZS<^eOeSoS^+qV}r8-mT;+ zPv6s6J*X{v8mcLz#>Ece22TdqJJ!3%V%RnY)!i9%3d zVab%+N{^88HpVi`UhxGEVj-QH%WeWcOT7<)^HC`jpqcAh zwD{v_7-)X@{zH||w2#4$c-%m%%o|u&Bp;CjUwHPkG#-dus9tPZN#c5Y$_$T{_X153 zrv}m9!Wz!{HEWrxQuyyi)&aZHmrf#-<`*2!)fu9c>Nc8#DSmSeedA~?Q7#|p^~Cb; z30(r{!g+H&LQ_S&#VIyFP7&w)H4FQa&zPbr1<9(X zm;M|3bz#Z8d*Ni2bo47AXUOOc|Yny96b+5Rx78LW-fBrN1n{1l-pXr6&#bj%j zmhQd|`8SE2G}ahnz{e6`6ZnNSC3vY2 zhrf6Eq;T&+<&gSC?A)e4bcnr43Mckftckb0n+L#ENDr?FMIizG1ruBemRp4 zZ^-#MusH}8PiK&DhQb_!Q4DckM@HK*s@97Ur7rQa?PLutowI$9QGlqG{H2-^vlPn( zsteRv!}W6p?xf}Q_xf$_@2=4TBu5-7L#S;G4WjDHXd?LOK0&0CM(uMzJ6flDFpG_*KB4N%ll z`}oeLBf|-D1J~7%8`~4u0 zq+!17zZoldW^OLLzMik^a#sE9-#>Uy&y&JthOPlMHFa@SmD-a0|7_Ub_~4MSfj4Xk zE(^c4>b~0UInif?51HfwN1!>dhu>C4j#2v0;Piq_SiwT|m;tA_ z>=;0?0iXT(LP9DGdM)EF=a81(R?jkU6NIgGD-1mD|2g_l)Qyg(d8Ot9!*g)>q6&&V zdb2Y9@y-VQ&`45y<7fObK72?)4I0Pa!TYM=wOmcaPz|#;*k^{35=^kZr%|BLuPs&K zN#;Y4SYWj;V`xwRw#G8=>meouII*DGk-{$~_X9%Bn^?khz+%>Uk|3Ky@yyF6^_7mf$;0UPYAb*C@sRvR$fAqOX;H1z^M9cv#qF8 z(~&Qt|F*}8sA-m)DFF<2A0n8irJbjbu5Mv*uN%nT=$lJ&yLy>^_yZY(tg1bf_Hm8GKxaj>&CWcMm0Wy_<6xW1C}&<|0QpLDQNoX9b$E*0S!3kDd`?WWnz9^j zy;Ej>aPu8nBSNX{uWRw1k8HSc!J<(S7vku-)nN~o(gd&pzF=ceb7bS91qlNEC-@+5 
zZDI*elKj6q-$c%xsW2W~y(Fvu#75%mD<7e7>{9f&a1`aGf5ueA!0T!*n?UuAMc>~#Y)9qLeRneYHP zE%{Y=uALJ;JV(bcHldX1&YY0eT$0*U=p_PxW!nl;OPZwht@z_hQPN5B6nlAW$^S8l zYqF=M9dR#n)5JyU@`JnS1Q=0I`vJhie8Kpc^(05|g&r>0u`K#x&(GFjMAuugF&64h z@}~;9TyeoSF$)rN`Uz&QasF93*DXpO z)ml3UL5SCvI=0#5NsRwI)V-&Kw%SppO=X4(1r0A{`ix@X#(V_mB$bYyQc~ICCgr3^28f+|A1EV3_Qd^4bEke zho6ar!1q{KeT1yEOXBy04y2kJdpg>LFvvQ!csGmAf2XofXS z)S`g(jSm5Sal0*2H!5fzpd5UT@vr}eNgIRF#CbTgGSJK?F+IdftC?z7%}KN(LCGMx zP>?exYdcchB1D}UNjYl1=Mu=Kvjaexr$w09Prr_dv6{F)xl3=wHBtQ9D`m(o z6hs|IsfFc)E&rvNPqQw}OhBlORB}wnIR>%NHd0+Rv)C2cdpL`8#5N}7YdKe`D=w$5 zpWr9^mzGtby_@%uK*VZBqagYwR1?XX6R9grEFc_<=o6tyx>m85i$c8ZQl39~6%AK80^4qs$(#%oGKk61w2BE(0uY)P6JGdN*<`N~yljN_ zg{{f7L6}O>Im65v*l||VpGT&}^2*MCT+U%vOsFfq<;hhrB*mF6*8Z&YXge*o@1Yih ztJ`4|9?_Up2$KiJt7B_X)_QQ*vIlt-RG8&ZO##n~w2eS>;14l{wK>;>V0G%?RZ#mk zfSg(A;1uy0Vkj1?Qg}k4yQcXgfW2)GjTV z8oqD3CrjY@NKrkqPIR&jdP+v=%(R!qm;vP#1GPm%a{7eD*BDElc=oHH_}iXevIeTF zvhhZ3`Su(-dYEg9PM&Pt$7Af3&$%B$=09g;#awTM(06+O=`T`>RW59R!!ZN|1ycil z{O|1P|DK{g(-l7C1p*Bf)NYd(7gnz6DAFd}`v>oZi@y|O(>iO~?VLK2;k{vFY)A?n z@G3s2pob65%5=%LI=dG4R7~~ciMXlCCwt#9ngb?V6!0|)0LY%)wkV~mGSRsA@op|I zZN|on2gNyRe?c2g>1@MqzKXAKc9Jtur#@Wn&_;-d8rEu6SHA;{MtSoE5ks1f#9aCy z6xtHN-yT9iVd-=JXi*rJyoFi2-FI*I7RAz=w2^{J6tt@1wSlG92~VoqS5Y%z?H{8e z(l$)|xNZ2r%}o#zmoSpgv~w0@ClwqbNxXqM%qY}|#xBhLy3$!e%}eqtwM6neM9lsN z9klY27b4Qgw~?xL)x3&kJ|zBR3wAuJ^6GxX%T$dsGdiJ9Edn66AJZmTaCrW=S3?T} z71u4wQ_I#AvkQZxfa@Vgj40;7aMZA#MwV)6%er+hg8?kYC;+h7U7;mq4YpIY4mYJlX ztz=YLZ2g!8-lU~i3HiJ#I%38{%1L$k0?4Q{K<h&f~z`vt^HJU^wec1X0w}yk+r&y~4DY>j^6YZ>@RofUs_nADhry11%46Vcl21?2+!2}ytt*xB?&Rtdxo-#PnF>xJ zo#_1&okPKl-t9FXWHn9V4;JB`8tQjl&bxh)3D)gtwIfuTZ90-#>a=Z{_Q_3Kc0#oH z%2_D;0(v4wLoV+jS~aY-@shcs<)47~EKm(8TK{_f^*dBHqzh@-=F7_g?_#Rs__lxN zddT@}H&oF50fc`V(&F)^cWaK)UYRCB1Gm$Je5d}I!^tO$W!5RXcKXAWKNVBF>%jJR zXser49B|VV{&TwTfV{Ueg=>DS76Nafkq8UwIdU5f;D=yT_p3fDwY52tt*={r%((KH z5%eoU8m9HIVR-&N0x&&7L{H|Pp~jNwL$#LvgkSaQq1bUOekQhY%r z2~UiL@LCzqtACaSiKbYJh7Q4u2TB3zw1(6i`@f6BOx2bZ4O3cn9p>Ne7I!z}n>b(D 
zxmL?-YQJ>AnJciXRt)mOo=quEBBW&wg(>TZ{oOCj{9fswip0;IoSVTvsu8WsF;Si* zFlQC9A|@zI6DfXP!80bW@RrW7Dpv9CD60zPGg)b2WuZ>jqF&nXq9!riD`A$3%25>; zeVV_3<5q6OwE&h5Ok3;u!iW1uVw5esiDwVQZkM9jI49)%?CHMR8-o-nDav0);j*ws z;6Q)uXvN_uL!7wOc1ryFIH1OaS!}(0oPg}1of*QJ>ya+UU!ItDiA3>9k;B17;lj94 zObp||=OAkpL{uR5WK$42{Jm8^*`Jdp^HRRwS!}^e+c(9H?4se~gIJ^&*MD$QCO#T_ zMt~d&Y_U!Y#rUzv?6*)TMBghFGm55>0@+olp3P8!5EGRWKqr5ls3a0Zx5TbJ`oj9< zjBQe{aJd2z{Xk_BkV-B=8$NzWMdBNP!mB~%OBP)0??Mf*2L(Vjy_`sbs8;KABf{0; zl?3p~V~jN8&IY2C#*8qS6SHJAOL@H~P31qnsf&iU342jt=iG;TWyE40hYsE^5@8~9 z6wE$f0r~?ya&;x`Dd*LQRZ%6z%|D9sLV& zwaGhRaC{w6G~gS+uoeX>ZV$m&@)^pVDR=B@3+C3VwJ7E zy{L=5sp975x9$IrH0l4_=s&NcdpHd_U%N7w#klK0XFDM(Xd{!(t=T_GdIhtk4qsPp zY8GHi+pNO@5^Sq_QTnC8TgaURu-$Jbczd&RD#BGEUee5mq`-?EL9wc*plICsxQl6L zB#Y23I|y}e?`$QBY0(!F7R50)HZY6L0z#>8D{6oL_SZv*7IZYA3&%_X@2`%!(vu@q zJgYL}Mu1@oTu|YmVHO{c^L4(D5`k9ff)KIGen2GGbwj406sC8Cx zv*3P8`W$t-J$D}Dgd@}KN8tMSL~!#^Cwgw>EA?U9gD}hZVbnBYW)?h8}qW>=z-@Hep+Vq~#3zk~# z2Vt7Y5agqs(|)JZM!t1-D&;)P`tR1FKbKtxaK3OuU4BQUwmd7E1Bu_=46lEqQLu7FrcWnN^nUxU-w0xGqEZHwmw zgU+FKmQ{yuL|D5~$tcs4u{c~$$r$4QeFz*eWr{D@xSxAW4H$A0}L8Y*Njkfy0S zP<3#)D5QWo>`^}8lnkymnD)z_9PlnLJ!{IqTfw7pv+;EhMx{%;J8RCw4Y?mWtIt5a zNS(82^2MnJI`b~YR5>D!ID7n(wv_b()4x5Z@Q*Rd{upm8PQ^HqAN9{^;F@~Io5VM7 zwngh|Hd4^c`Aa}T*J#idCyRsjAMA4_QSDQru&=)qdOl5()*8b*SK)I;9uAJ)6sO)M zF+p^k^$WpyujBtX^W&Q83vQ_gx%j-p2#Fd3s*qu87@BP!H8x}K>X#~_HDk+qJ#U3( zzSY%)xsf*pEcGQ>H1N>Gd7|^RK$%KwcQ3qJr^)fHy(tALL`?d!)Pghz;pjVkUh&Q% z_0*ZQs0e+LGDm>3wdzuq^+H+%)e8Pc#v4?NhbFZCy9F!pwx$A1@)~ z;=EXG5Owe)y1(Waf|y^K!l-36OA0+l#~2=InDe0YmYD37tGg9z_9EgrHHvR-aeZ~! 
zAzs`INZ|L3cR$Yqov?3&51i}=={lg8TSm6f6DYuB?kUlRKI9_}>rj(n+1Q>lT#eQH zeoQDw874s8Zm6|#DMlFvz&O~kBl0@{250Ef;OwSH=bPcvUT=o}liIfIdb;A;vo7>M zLHJv)(o8ye_0v!{Vo}e3AG3xgXuID`yHgKXWeL1F`E6#%|K^19m6eL9M27_bD`3dg zXWD^bJkXYJs<=G8q{BK3@nJihS=Yf;Qedf4Z=5kUh~ZJnU0jE1#~f}O&+DZZH3<=9 zR`S1&CLpU_sQ$(Ft84^2styg0GDisj0UVK!Fy@xAM<>6wb^9*tsoVcXKPe{buM=|2 z3N_~pR5z|FMZ+*ZuIg`~-dnP{K#Z8LpWf)FM9#j*n$nav^%ijV&`r+Hhc^K;+`^^g z%=FZV*?x`p^^M|8_)D2E%MCrl-K=_0G-XBtWJf-kiW*+MXh%B4T*%CUt^|q!R1#2> zzY04nY~u-Audo64u$T&V^3YvCLyDdFUIMczxK~sRZ12OA-#=v}Qg_g#J=fvFhKws^0iMA+^xhyLtKB!nqRo8_VXcND-PRb_r z$d=LiF2@CgkB6H~(BmlaIk&#L(ZtG7E~+J1VhrnW*a3nHn4ul^8{i#NhsGiVZZ6vy zKy%Y?k89t!Ve_WgAnr{|)Q4F zDTI+{`%22N6L=si)tl?uFxhh1bzWsfaPJ;vPZDO_mp=K@WAq$&nw7J-cet|f?a~WO zOK0WLurDh`Zpi7OU6GT;jh?+Crp->Tlvc3#eg)Z6+GEtQjN%(D^C)vB62imo&C)A=?bWNOpa*2IK%c5ZHFdb+@v^M4+O4@dC7oqvhgA1^mw)v*zb7PNIXl9|v} zg9|6Uy~zOo(fr&ucd|x3x3)_Bm@@xDpr`Z!umL(?gIf#BJ5htAX({%P0x5vq1NyL4 z!MR)hYjtVGi=ao7{S#+{DpZqsV7`Yk0oa({`@m2;qaELv7)Aa#RMT6~Q}c>Da?mz0 z;|+z?6ybWAgjf7&=QW4XhHnR#0#4k>-4)DG7l?v12n5<7C&u8=5v|N^;n%Za6&To7 zPC*)KGA%vqvG~R5pA21#sV|ZU%+E>mIaS&b{f4%Ek?nK7?i14MNFN-h)pJi0jFTY8 zB^itBNc71dvUl9_fm;}}P@HA7qFO=UUfhznhzNW`K8PM1)pe6Ut&Dug|MtI zM#*iateEJD-<{9iC%C0lsFR6h^a%h>WydGm>nhzH$Yr%mWJOoU!*9X^kZMoFV{=u^ zLPsinwgR6#N{eqC#f)!gtF?lWJ_Su#g`e72`z$5m{WEHq947x#6IkEBNXL&kFGubv|6UC zF`qD<+{{;$(b{-I^LGN16C`@Ai30!Z^?usQDQ$5Zb7_UfusYG4FYh9CeH|y@owyht z!wowBSC)G-eZmlI)lOemjollMe$pnhL6&YSzWJeyjOS>1b6F$6c&4?_Pufwc)*5c* zFf1mrfHm^cwSNoy2xi`INR!coA2lz!#jmMwoY_Msqnkcp(Qgcu5eP5A%vQPFx*65* z{n#<2?&h{^m?K!9Yw==NGbchx_oc<{<#*^8S@dCCq0OdX3?RnZ2WhoF zYsx`hi9dAXU$_lf3c%fOt93HER~0Y<3qs9}Uh)eH8k*Z^SYnBdkE@n5uJu8D>IV)6 zEp2SRjytJ;&6v3`bIj*>=bh6Nv=MMkztdyHDLqoyeZ91&*9>Xz$#P^|Mj&m`Eq}cH zNccuqG6b{sbjUt`;X%rCLxe6Guw8EPzj}Nr-`@#D$cSCZMIVpy;bdKO@3XiR? 
zmOMG>Q&DpQrjy}yv;)>1tf2Y!pN*|OZi-Q}R~V92FNVjEj>Jqv5W)ZW<&wV0bnWNW z5{L}-+@vlN+Py{;gz^AP-B!m+b<5^NZl;AwjDJ&ziUeFcMN=fTyr- z6m||`dlC)hZ{3SJB$b5Qvtt4Ss=0RNSKN~2P&K4X7Gx`Xd}6+|_GkdQr3(7QD#mg( z&-Lbkx)<8Rvym2NiX5lfigB96{K(pD0lQzS_qE}oGMMKm^B~ZK6x({DJ+P#wX07X8g1-0>#x4BoxLbT;qc*lfupylcpE<3p}51QGb~8 z-gVX%oZeYqw&bg92>aLPy9_Sr#*6u!kY;I_yn<;`3&c7kYwLu4_)0vl2QDuBiWI+~ zWLL$m?BPsFrF!+21@QR-=y#T^L&up_XcYX8V;-Sj(4+j}M@{okoA(o1ZH_CrIR#U^ z^hfhWQ(fxhO6Zuak?IKS{5=y-u2X+z3ssZw+f79evSkXes9w?67X)P1nMJ1jojQr4 zh)rKdyf(N%dHL>qAIf~Hz(;ijGKFmc??oGIws!`mp+92;ayY*9A3Sj6jMtDh$0XXi zuWb=Gy%#{@NiiO}Ec~FW0*G2EJvB=5Y~C2ZKoUSOxTwA4=SwRu1O)V7SKp1N9D)(d zdpOWlMXLtfTSe1IASTQOm}Fn?ebM4n)O4V$`j6q77May0Bt%-i?d$gv^7i+plP3No z;&(C*tgIG`)xq9WI1*+3qIm&O9@lcwH~+q^0D_)NVcB6i$xj(Sfeue-TLv;mxVb&M zap%xiiN0HqH6@z*q{$W+7-9~-wkG8CVHR{s`%~U0SBlSNMf@nY6Uw{!O4J(YsH9PTt1>NF^fC8`0~j7!jxuvb^oPbK!% zN(D7}(k~K?@1=$ENq$uKbrCQhXRjO^lukCUr=d{yfE0_Xo$^2RMIcQ#B|1_dW*i63 z4BHA0j03yU*25pxt$bj?_z1-^Yg3G`0y@d*1Hu5NrvIYg$kL7Lr@ad?myK_(TAcv~ zjz@clMAp%d1A2TM^J*+5HhH3$HxVyL3;*9Q&o>wWDR06yOe2n;jgHjC44c%)B6NPk z$3(}K_0FH!fFHpI?#*DVcO%$98?Mef`m>)JjePF(NChS^_bj2evI50LF`ILl2>@_( zXkLo{=iM>(irdaR3y|)&r$U!Ye9yx2XNiu0y5dih-|O4D9}nX?Gvv-A)%O7A#==;e z=N}H9W}j_$1NvI`$L%e4W7=*;^Yev|-zgFyT#mpGKgi zN><8-16rGjl<;l*OrB}KMH6Gs7Su{j+jDyh;iof?mroCmfTVDa_vSd;^&IgnC?+KE z0#&CmR(r-1$IG_1pz9BcJ2ix%u>+Y!)7ehS3!j^HdGLCvdC0zE@|L)!l-}U(0}|5n zrK+Nw-bT^ACEiO+UMq)RRAm@ER+paK{=vTVK~o!v=gvp4fj+RP&^*lQ#27hFMI+vA zX^m3pX#4!6mZ@!TGZyM|Dv8zojEW|EBZ?dop74a%6ZHgYHQ^nwS$l+%)#H8X-m%yr z!KALUtA-^GIwZ1HU&V#jXi4RSi}VV6;@VTMGWqm#`m+XZd-uy)kfW#u!;H2_IC7VulF8J(e@^OxoTgU^L-c<{x33*VH))S1Zay9HbnSg32)SH}M$n3EqJNGRV5ejZP7wFzGK?MQaPb`-rzhaX8?=z7KRk+yxOF5fvc%xEmIk`;y@r10 zabFfwa)*8hyr~w#RBj^GDRlOC9tTg3s7)xegQ6mtm+I}8bfGHo1B$-bpLfOgKWDb0c)9jw*))H=z9=w zJz_Gp{}#(pWDMvv!1A;UHleVHY>6W2W}>5-cwbqFy7NagjMiZIr9TDJWVWv2mlw0@ z)HB9r8IDi0ekc{PlKsU&AVrCDyDZ1*I>r{ITH;=@ykT>4UW6{b`>Z?~w^g9#)+xin z3mdy*=WYr}nuo^TG0$c*TV2$-ggPY~L4^S$`A@T~Zf7wti*$`0(0Wk-l3 
zxqPqXmv%Q(e)3v?sMq7q*ku=pF~sQN86xeOMEE|TgaKE(RqVqsmWMV;a~5J|mFd9$ zM(PQ&d{(coWSPYOkJZq^Cr-QtKGW>i5MZdVnl9#w+5zFN#^viB9<_e0@mFO~4xq9O zDx6x)ZQZ*e^aGA#W``&3Or*Nzz;$%k!z)LBT#$#ml{@T$HM9qAzVR%+dALy+1l+mx6yuUh1Wb$bMbs7&dEzC^y6g4+;zVWIr_Ls2uMCJ9*)Nod z*xHi2W9Y9`f$tA|PlCoE-dmFH3|wxWc6{hNain9@@hJ@4vW+&8PnW|3*b5&*?B+3>ZqowIb<&WKh~^B=UVtn8Wj z`MBX>f1w3~?aOlsi4hj#?!}k0rUY}C8Ba$B(~uQ|spjpw)W-cTPbyk+NKtqiIppV# zr#c=mdaei&r*cs5yD}Y9XZB$)A9AVURnUA}7#a@1@S5`HxCOavHkApk+%W?EHa~xF zF7>mk;TXuQGR$GG?PG@zz56XTd15|$K5i*gN>;?4`gxe44Qqm(SjxtHyg8DR6xX03 zbZ8zz!z-t26CDaDT%q`}V~I1}5^3>=R_zs9ljJ)BTrOkm>80OzfJa1ebm8k7$OOsc zufOjP)6=4}x;%xZg_}NGZz?K*A^N{J-+S>aJN%u|nQ++-oKKq*PTw6JJOUXtzWH8R z9xMLa6GN=K;dKarn}GL$bPu3p&0D3Pcf}RjVktYm8X27M0Q_R0N0NT-WIleB!kNh; zXD5@77KhHj^WLET7e`vB&pVKtI&LZHT#9<8*%Zw=l|F3kmp~7G7UvboK`HuK>S`x( zU|CSpqWl&{BXlM@;)GLGnh5}8SV`x5);_-i?Thp(>%CtNI_ zu_hr|ig-FBy|KsxiJ~^N9K3MRFkW0(Ra1_+AVTz%~ zSSLX-Vt~1eIX{9k4cV&Q%Gdjcefv>cYwWU*y5%b=qrXD)DBwgsF1p@DHtOj2CzkEM z3ZSomO5R2rKVA-D&3m*5VKySqEVT^o|%5+JxFIE2Q7<-Fgmb8FYB+CQn{2i?V5bIm!&c*a^2 znla;adFcRyrV_{S-XtYuS3tojNz%^R4nt$YR<;`vaF~q3vrqAG)ZbcuayJOsn}5@2 z9)S2#zNJ#r2y@hhf!Kk+2L)QdulTfz1wfBw11D&%G>nIneoVmJym_|%iBJ~3@R3SY zl+#OaAm-E@vrKWw`{_uKY2?ej{8VGH6^Yy!->;SoBIBrYyxeJ{ugnCDn;lOM@ra=aPbe#|1r9S$&sYKZ?9#617n$Lz8MxjZg8)> zI@eR?g0K-LI3n?hKzu}KR3bhUM%<<39IdqUu`9wUB+yKJ*IHR^T(j!N*=R)P?P;12YMo9`MgzNWjVeHo2-E&cG`1XkE(a}+hz zyXz5i9icPRosfQyb7g?KR_xn|o zd-`Fj*>awYV!;inWoasI_6zd4O`^pTeLC;)u`YWk=TRR5)Y)bu{R7s@-|r2`*y@Ht z%DFP}*#*m1##Ob6G^~5jnt5cpAn1F22`xNxpgg%ui@;cPp~T#LkgIPhgzEv= z%fuc|X5NC)-KS{Gp5;QlN$n|w@+$;AAXr;cs9S#1oA&v#Ab!)LilwBB`|p1*jaow* zE%A)9xI#`bG$AY%QEG`)&Qb{h+zZu}q2s=SA9hoW zden|F{jh^oa+|RJmhn}x+VI;~vY7k%^3TPPa$I^x48GCjRif245ww)(PuTm>TN4MP zI6GSsH?Qkm6w!j&`^UZFm*X#Z52My3gzH;xRW@zvla`sWEvv6{&$v1WF@Oyy?HE@D zQo5B&;`XuK8{@VQO?sRnfE#`HXUM~yQ!2%OCh#Bxd`uyjBM*Baa=cBfw<02;gI_gc)WWV}&V3+6pHv-#86d)(WoQ`LcIT?<9 zV?<4+Ffqi<>2p6f+JSl4=)Gg1B-Dr#Uu@(17Px`QOi#&2e!QCBB+Ne_(~9wnCZ^1r 
z@3K&x^=sg?QN2yXQW<30U$Hc*b=dHEVMRF}MOhiFwwDaWdDV|73w~r4?0>*2Eqcbl zOA3ySnHZ4%wQp+n;$ga*;uAcUR3len#+` zcCz@nB#X0_Nt`d~fv?+S3a_fE^*T?XY%h? za|R}K!+Q&&`BQZOg-oE~LwTlT;qXwGyl@=jw-BCDbJ687=lwQ1{;QyPUaZ4j&~#Rp~}#Q+I_b00nalg z;2(G4#iE(*!tl-JK7ONVzRKYakKaG_ZfTO^fK6#Pos-Q9h3e4Ik5_5U`P|F>G+4sWNa zdByz|JD;DY1z`bvY^b|3J1W%3Mvv*6@doY2XlmI56Ze7BHwkuaq5F)b$(c`?U|np; zlXEEadGm6tAOyC=!Y!e=NL7Rhgqs|BS&tjZDo_nggT#dKbWr1c0=c7XMCw6`q_TAvuyM;G^@|vA0$6|A6 zhoSstv7A$MrdhANC=&v_X7Sk%ZtaYy3P-wMRrrlgm1Nc#15DRKl2Sb`zu^q-Oi~Ap zNu{M}y?Ynalk4BsUZA-CQ%yJs#(FYiqG%n+c14!7|Et15*r$TBfmyS-p_@2MlYeI| z*1zpzc`5H7%=~Hf7J6@`#ZPE&#O zk(HG@uNQ7vjepnKF(gEwGUnDKR=s3-72JbB9)8>to~3%66u7EYPjA#a;B9}|s!KA* zHzY$DiVcdtWiacr;1U1mmTT)I|M`SIY%M~hhJG!fLq!V|U*NgxGTZpccO4nJkAl z*c(SFx+u(@6)z2X z!IFacyoQvo9rkTAD|L}*Tm(Q@NARYqVhwuJqsJANb!#Eo7)7Hb5V7!A*7#_v%QlV- zN>z6_|Dp%i^aInDT}R77cq2jE?=<%Fx0-5FPmhx`ESbtxLdQfbPh%b`GBh83<@XhS z_R8)*#yZdl&NTJnjI|zxac$zl(I=aw)N;vi%22uP}`7_+$0Nd&K;rkdOX3{l@8oas}h5wlk#MOl!Z%AchR__Y_X9_Wt zfu&E~PGax;Y}O0wM<8rHlKIOO+LGK?u#{BL>@LL9Fc)j%V32xwy@jiWUir0?G1f1& z+|+9`0*U~}gf&%;s}Pi{0BKjuzr5mIB1bzN1lg?t7lESKKh5H_R{(e(h31}mIfzT zKV!aHvc$5eb%D;3nsdU$A`!@bmB!p?Cvw$nd+Gj(h>?f~#fsgxp@+xcd`EuB{2Q~euXVM4_JMxk`~178KL*7dYFG@ zgSYgiybf}n*vXyP-v?!o&U_)rq5Lkc^v!tkx@y2Pe({sqj5#_#pBYh2sa?S^@4dN= z$ZT4YT555VEBc~DZw{xJ9}4)IXDf4s3*Z=JQ;suwZtE`uly~baes27dZo=GCxbfCr zQ%yr<^Ml$S_#T&T{WF@%7tqW#9}>kup#Jeha)wOc7gE_O$V!FqA8-L%&Oyf8dmaV2 zy{Pa1DX5Dp`6+KRe%jWsNjn6>YY1k=1?#bdw9o~yF)=Z@?hL`NuCI@-tdJ-rQ9%JA zZW6%$nKbxcCbT6n|KhT-;zVBfN7sNweTuh#N15J`F@Csd)L1>K?G(0!1|LT_kTpDI zq02*QgW3W&v6(-RJCQoJkA33;ze(!el;;tNT$tFUU`Q@i*OZ$ShIi zWe?%|ceujS62sdkP0KSyJ+i%!BmrBucwnq~JVr1>@9e#H#9 z>qD8vN*dyDm&!*F8XlIyyBmmYzBW;ciwt*R&wp7N7NAKxw~x+!(F)74V=o zMJH^F!p9{z2AVmZBYPN{viqP#BzzXWja8EV#pJja1cxL307n)4i%Q(nR|5MSW8{^i zaJ6-qh@4)Rc41w5sLutPRfAm(7qY zUDf7nZ7r!$UM%P$N~rI`9Cgz1)1C|#*GAPvB)x%&aPdKy@wH=1nmG)?!I9A^zmp8+ zp-UqRB@9ySK_KpBvm27r$24g>Z*q*N79n13v=5hir0Uup!?`DWuqcYcT-N@#BXZ!X 
zH4nA7(#d9GI)+Rf;K$fR^{q)A$~a%0R_>XIbGjQMx}R5RH>polr3hmf7r;sk#-7UnWV5~@Q+ z^L|IroK3V}!)$*x?k-`1V;s?a5);JZnY14WN>RQSiA_IJKkxIz`(2ypTE!USwL;u1 z9F>`0DmrIEq;+9HSHKVGbD;HLu12?d#$vbfXeIpjk1rAtz(n4dRrhK_ojC2wr4ecG- zBrB1L2lu&Ln-0M})SzMGV}s2Pc!w1#9zPUIi>L&VzMMG{H=X)sZ%}soKSy?bHiCE+Q4U|pz^WV5?`1vh9 z8ha9;f5ix2>IimxFPtKVqQtHIqNsQbFU6xujdb+7fEqCmPs&}1`Z{(L@({wUFtIMy zNLx{jkrbRA7Gd*BTYOny)?#Fy6cpO;@*Gp-?tnzWS>r6PqD~)IOO|YE65aQO>{r$g zy1`OxsAv2xPbXWGgN$Dw8!Ne?*7{rfdN~INO3@}``KJoLqw~J7!kI|~vU_jS(F!$Y z*DGHEgFLpmjdxMCSB~$oPin8UxIS?2-~D>*6tMEMWoH|T{on;4>B`p4H!vqrj^83vIi!J4M=w zoiR29l2n0d#(qM)I6wvu!Hh`L6NVtOa}vSz6pq*9HS5*-O#N3PY5(c5UL{u0NQodb z=DcB}n7n*YZUo`rYwp6ir>HRdo>WWEzjO%~-0L zWgpVWn5zUxNETlZb4RpJ-=l9#)9(${(DjEeKw(f*Ot`=O})08^PI% za*5^q4M0G<&bM_&03WJo*mz4r+&(2*Hj=Y%j3Y4dEm>Vo59#%|Ts?k2b3XjA0@}em z@-ojhHdSx(a2l}9Qftz8URIChY2wqf7ILR*=PC~p$8mFch=%ig`4J#>Jg}T$Q_>Q* zHBtZFk>BkR7G;rd%RYX5zzbpp>g20}YX?YOUH!S~-|b)okPu=91c}gGZU1}qX=`tv zZ+!=Q{a@P6y?-Xk+Y;CJs|5Q-Rlaa#a)Dr~vm6zvKr!w@Hw-=orcH!+s}Mbtq=!oD zAFP#LTg)LaVAfKcJu_RJJ%~sgRT7I=Vh5vyu{gg?*d4zSwhODAA**r#(|_B5YL|hl z?!rd{k4mqbTeHnu;Ko;0tpZ518hP0P@NAY_MZucqZE;x@&Urs#*RoW+L->A|Adowo zLT;NS5(lt@Q)OId$FOcDmz4I}mg=2zzrQ`PLm6X78Da2awTF*W0mUKf9eA$DX_snT zRqU$dpOMMm=G1*2xw*KSjatfem)bbgX z@D&qB0EV!x;UUA`11in;Y7rjN2u6qmJHJ}7JE(Z{PKmJ`lJr*RI|}}Y*2Ch4F*?!o zgi%Jv)<~Ef`poPOqjfZfo86K^)TG4&$~tI-gP2PNFO(J<!a(HJ5^yKp0a!A$}BnbVXp4qj9!$#SkQ2OzpF2ikdv84jsPFJFU!`zXN`UIKZ zG0p4wea02)>&lTRQw6#F3{fB=M7Ll3AiHIDaq8CC!h2+4(Pd?>G~<+cBMr%5v#{IH zs8tG!81-R)WsJFTQPW0R#~9yW%Lorv`w7NNpLYbV0%JOc<$vxv7elYFkX>7Ilcb** zV?0-6(ow5)1xtCRkZ=?376`ZnR&^*_1|HkC2~~o|W9}Ci?J#XA_A4WmnFZ>v=Va*m zQO;!VTJ;gAPZiL7tU^>R{ZC^&mjG&O8+BFPvV7QJp|T<6L8ZoZ>0O6sLI-5Wp%Yv( zvoyXQaWK+eJ)OMcKxfa|N+3M42nRD#6802nAdgF52E~tVct!>|{@AssQ1lR#X&Yn= zJMs@aeiUD3kC==@9!a6tX$B7nUhj6hV+SnH$b}TtDo;?V{VDR29OpMBV0bi~d!9Nj z%%+QLj6G#l@*|~*N@PYhG@~@u?Bg;fnN%Vtf4*(WF^O&%R$L{pO6=#c?qdFi@57vH zVG|+gEw7IT7x3vb99>}6DAa`dBGGd;5UZKlPQd$5oP0b8tn@V$rbt3zG1mtrPWt0H 
z92iUNTR1*rSQcIK*TdQ#EF|HNgF=M8|8F`wbZh}JdC=7wj_p6OkLDL=S&I@wY3_#Y zJ*L9;VC#Y><-k86$e-e$uGt-o*>vXw|K{)bV`=ZyeSkM{@NZ7wu>XttZ^L#hB8V7A zin7?4lROhxSC_rMg$>aML4Fr^1qWaRdb&qJ1Iq|XIo&=}OE~sEx$fFC&lnFxO;T<)Pq9SsFOsY)CxzKTpdYPI0hpa$|xdVT>t6Jq8Dj{$@8~QA;x($0Btx`7OxO z_7gbLtLpOT^P8B!lRDGkU9mUm=?`Zs50_PR@R1&q_R2rLa@NRexG}n`)g~*g|NY(0 zQeUm4WhH@O6Fcng=R{zCLO$=EPc?KyTlVr@ zP1jWa%rT&^qn)f}og_9(m$7uxmo!jQb^TI|&wO3y%G-oL;W25~k8h)P>Q+*USB2>? zmuVc^v7y>#onFdVp|`}9Sru)K%7bW*GJHnm{ImUA545A}}toq9kOjSolzn_lt_iw{KrH)nsH8 zS+5qgwbFB|91;xbfG$9`Jv$%EK1; za=Wr97k-}3RM7{}K$ptt%#Ar@kqJN^vj%*wv7PMQR-|%Lu+3y~J3sNZcA<})7u89v z`?0Sm#uqV88rWmH z^=T}`qWEhHW$rjSAV(chlPzouE3oXX3lcHT?m=vCFNs>`KOi?vi^H$WB5-ITzH;#Mh14mo|gg zxG!?BAa;grgXlRf@Z(=BUtZpm*#lD;)CfD}bb3OKJ*@?>b2FGwQ$--oZX&*X4O&+2 z_Es~=GVC#a4&47Danu)twF1UB?PplDGAdzQVY4<1I6fgVIz5wB(v%W?LYe<56?Qkw zV?>^dJM=(SRhB6duMbM)|6S! z4*_q)b2;s<4XMycL;~SOp|FEHXMW=@CXm|Sir*|8P4aSqe5!TimtKmTua6#NL2sJm zpxxdd3VIVq4q6|&s}U5u0CgP6+`ZcQ7C>+$At!>~JvUi;v_pX;{)LQBd?NQWE9t zsNZJ4E9fV>&0xai5D{DdNx)G{d@} z%8HAJ8BX5z~V5h9b$py%jPhu35CP+U~hJ75x_p`*-vi4xtn+2#}|v>(u2S6 zj?ybDMe(TkOLxxS8|$U_U3Kn#YV0y8c+#qz7sngog=^DAEH=IAI>(8I6Q#H#-9|1L zWkC044lXYYt63o4;TPFLhoD8j0PbN~lCPoK5JMs2j)K!>bu9nKYHFcArznz%Zm>;R7TnMrts-cBkvB-#ms*?cxfSf{urRB+lK=O~HM}Ayo z4EKew&UUg5-8$N-v_?kP9Jui#ijBS>+ph5Hubj0QvpK(7SphgiRG(6xnz6y4X;r6jadTNvj$rAXecQibvjMecmcceSeXMt>^Rf-sf(+9*H z4<I;haOICy0J-d%PhW zde}w>k}u1b7TNtCk5xz+2CxVxOPb(@c!j5m^*Bjs3OAh1b0p|<+` zN(QRjpH28jEwvSfY&cpB2#j_a?^$ju*nxU^?OWI|15>HJ6R-C+I2=#^-8uUR;CmkK%7K3R zr79P=mCrr%n$)UM(Vve866XSoSL^|CP-zrTpey*a>LowdA8p!(YZp3ujdy<|6@(o1 zFQc^R0pBGsC30#{^%IdUO=~kqznsEgma9Ql5^;Zl>{rG|OkegIMU)f=t@rv#-`AM< z+dKA_a?-kdVf><^U6n8*82s_c?^a=-N@OfA1-ea7D}5%`)yQw{kV#;Wcb+Dq%9xxL z9(6_6MwGcPuYs)bNoU7bRBq>eK4<-ScdCCrM-9P*Fwre+1LK?VR6FFS@!M3O#~Zuy zTmmKiE(Um|qZTO8+w$Bc@pNVnGCL9W8N!)8cl}KZ=h@?0!`8_NpuIQjPQ2rLd*Ba+ zE_LnzEoV~2)X*M#cPgW8epV*ahisWcGu)h_H*b=(6&7F2Sd3`8?HKi@cG3$v0qKtK zl!l)77>2F`?WiuwcHMO=wO0%l$;r{3o#Oh=|3^&wf2-+3(moSQVJ;a7 
zQ$b5gP&~Fuakc<^lm1k3*Wk7848Y9loQ1P2F|qsR6_3n*Q4+LtxIECu04y(FS{;a! z+!R&(3IKosrrujX-+Jtp+cTvDdc_k7j-J_uY3JG5OGz;0Lq~zFRTqizW`XUWE>>w+ z23sm;8Yd4s6A(SdwzB4yunFx)JUXJ56+_$)T@Qg>s_O+5ER7EN29RwM;Ij{KGQgXA z6K&%Wx0YoyNW)gKZr)KZ-?rEpQRWZ*y}-*qCsRCaH(4_7iW|Sq()!^}K^n>Um8(Hf zdtKy1TcLux9p%6*+XN0{`&Va4$Vn?Di<3lj`WK83`~t-<-;r+$q!M-?-QPan;Jg)w z;f}xbbDI}KYLKrPbXz)@n9i4rgS9DU2A`3>4?bd;2IWOEZ$BX$nm%J_X)0US?V$dP zD*n73zWpk+(*e_3Rl10lyZqRNR z`MU_AAQfJ-H)IH7!i>Eg77jCTNeS|=(8hJ#9Nj&lMAZG~+x5Ual=2HV8EHL4Uhio> zo4+w|Nby1bTxR1DakVE1(YpzA>{Zx?BE}@Vmri%bwVa3w(lH9zef5R%VRPv*@BD6$ z_5C4=$JJDN{#&bDS=@yv^3cytMs1P>KEB9^EH3)?nZX7ot9zC;yw~92n9Oe@fd8Q$ zx9JN4&-Y9;ZNbRFRLMS(m$nc9Wv;WVnEVBv?l9LftybXfv}eD#MCk6MhgQXlBkUk& z)Gm?pV{P@wTf0Ieh{_q$GJ_q9n%c6uSv>+`Mt5^|E4Z;XDrQkXT-_ggwSNQ?H4w3x zc?8{XB+Fc_8oeVr&rCO6#9J}fQm#3)ntDu@6`8jEWEI9BOmGwK$nAMkmm-~`4P z-%G?Qc}H5B#)t?$*_zvAb6&(Qj!wcVSv=3o%G@r9c~ zf#T0)jP8-Xdv<}}*XP_@7eIs@Q``&@4Lz6+1DZf*3xh$K^D24M+`6|Q2(!ZZ-g+o3 zhQWYjNRJ5!bD{)K?s7J4W5>HhLp_!8oxYmG1`)=~mXbGzL4iPFsX7jp*}FO9ViPgPu{S0mTP)R3?a!uW`O{`;7oU(cj4 zo+zl-F@F_MBY~)8$$h{&GL>kq!83jSvxGewI*mX~QYOeqtux`S&~JfGTt%C;ImSB; zfJ!LkyxlR}wY5{SGFdmqk@f9Z7%~nj$|~baPSnlVrJHdPO-Z>qzn4qma~}xif6?y> zsG+=1gX2ZC{FXAQ=1n+@G7b{$F8al5y~-wI-e^8G)D>4S82R4*+egHRT^S~*&ezBPnO;Uy#77xdb;RUP~Qg$D9*#mnphEJ2 z7C_=1u;j3-b{Vly8PS?rks+%Z2$u^ys(_Ia`*cqa%B>D``Vu4_LdnK%5{cMj9A< zHZwq~7f=*b-ffmGwxuue@Wx`k&TYMiKyJVT|e=PELVq$OnJ5g7Q zJdx1B%y8R@#oH!>&{!<=uGigOHffV=xy*aoVU#de;Q#U|fBY2hrtfCude@k=~} z3qRDY^`cP2=YChTb*!i2X;Yd_akau#|yMyr8)fxiLoey!?s?6$3UOG&)TyJ6{A*SN_i1_A*vuetw@xEGe$tysSjghL`n=^YTnC4CdPnWt_XfbS`XwLs-hqQ8d+<#EY$bL|5WwPf zK!t{+5E~A9*wn&1aR(~aeCVv9!d|3RJ&U-ezrCF>6%X3T3)z-%+gGBeLlKKZBQeK3 z%kTtN2PF1HZT%>)6=no$4w~LsMLCuxqc@Smh^9XIFe^$-fkPrktwW3HEYUsK&m4z? 
z;KwqJ*#P>PPFu5)72ey7!;yC??!*ph{ORe%oCHk(5y}CeTXYb65Svd^cy-MPNw1b# z#prg9V`KW9-QSE~DCgQNv7W`XjWx1h;&YT`S$eF#AM>}P{jQ#t*~1_K^qX|6`lu$$ zYCBG4i8MBWNWwgyetXWH(6xJ5T)HzL>Cuc7to5GK@Pv%R1Z9%LnMx&YS|sN{fVHw2 zFyO!>Q4p{vKA+Ids3XqLK=6+11wDE_g!_dA{HoQ3v7?e6U~$a?3*~=Te^VS#_leEtSZJ`@#8xK&T&a^h~12$ zr*RaT_kwJ38I$okU5fXb@YVUE}=?29Bv6#gf?jIXEBk0*! zB5c0IVFg-=D<``0-Vu$z^r@0-!C}I6(?ZC};AAe}Gnvqg#>_3ONOTibox%6i95RT0 z7m>Hys0Z|z0>dd-ZEv}i-Z!HMP=L()H^7tA>cve>kqVfHmal!?+&oLtMnkL{o%8UT zXE!6^sGT&hqdYXfUwrZs&v@S3_k>hG@uCpT4^~M5rEUreeE%0VMqZa}=3Cc?W1@7-=-*IzsfjV!P;}F}r5IOPnPaDX6 zOAsb;t1tWCM>K(hwb9K{%PvXbILysJ;lKuh!$z;2_n&)1pjESlM7K1epC7q}#S^zm zv^;T?YigVB?#gUzkr2zuz+f_2A6@1i1&$p|@P$*BB+J`LNt$H9vVLnkd|RkN!R7?L z5DI$4aUkAS0mYj>JFup5SqIUQHB;M})8Oh46vr7?DDbEepq3 z&Ype#w}z3LeAWE#wIjzHc{2bmH8Ay#kDIt1vZy+ziVyHp)eAm~@3Ro^89*nlv?fx! zA*YKNT3MM%wdqFikl(j)6@W<-0YR5?Qc>N#QCVy=*O=AJjUd5P({g1FOf=zM?L-_` zO})Z8Q;-qnFUT^JKhM)LJtxd8fb4hthUKfOf{L>!>v7M|0z>0WMw+USDv;%Ip)%(# z`&y;t-_+4;<8F7Q7I7=SCSaks!Y*fqQaK{Qp3am&Kg8T_Wjood$CQ$pyU5ylnm3l5 z1D$K@V?q866-V8&R%R>6GOwk$z=-Chw9(sm$06eR`<(QFH>QEpGMZ=8L9}y2VCwX* zKFBYv2CJ&YI=cJUBOnK=F;$VtO@{0F!V{^M!w?Mx#o0i1Tl(%=x|lt=BJ;8**pn|H z6yR$K>ty3}rTh4q(TklP43wJyPV`31djUk}0iQmO3!*9*;Bm35expVL5I3}p3=0*V zwC&zk7L224AF@Q15OF1N=UJAIe`&8Im``>!#7u&FSgf=FWvHZK{Eb^q1OEeRQ5YMaAN6e|-rd#l>cSKN8!S*jv)od~YO1+R z9Y}_ltZ|* z)M4X^AwjQwbWovJG?wv-M=`h@`Ms4y%i{py#7=)>E-pWg?3eCx%vCJ14py~@AAANg zSIO0w!UrlT$R7+>Xl2hVL@mA$o-vsdUyQ@&)*-2E6_wajPvZ3I=c={ zk#I_Orqy}GrGfFd)MiZYwI+N?V+;n!!#R?bpGJ_`e<<}OwJ5ty-$v~BC!{E^$DYkt zai3R)DwF6$h}O~#dEu@t#k%t%{hgCO9UX z72#YY@|G?_m{TS_uh13jA+ehm{_#u|Ifogb(efj{r|rv;29 z(UWn0mqxv~ubz$EIC2qPq@#<9h_r1~)V84!G<=Q_gM@~ZI^6RHwFSlUDkkH7{IN&d zxo*%ARj#>Za=L+7>H*%^idj=EgDK)!Ld(#Ma;|o=DE`Zbl%u*(q@E}zc>gZuWe|?D z3FJwTb3!6^GMUf<#f6!m-o!d4V{*#e+8#+AONTvM2o*vyGs>&YG4{7Y{b46HR``Jm^^LLDscPo+7kJG zcif6Sjj@BUt9Nkw>w5>INxO9vgtHnF669dDRA9imi5*|F>A1OUx&X{tXUs&n3oBJ# zFsIE70Otz41(}D@RHO{7Yf6VbO`ND51XhE~5lEbmUEhH6c8M8+{Qe+8ekbc;$NiRm 
z5K<@rT8ADNM_J*>^`~gJ#@7GOlQQzd+sMhK3Kd3V)Z>Xxc>{N0VJ6NSB?%uN?baU* zxAOEjFD1@y2HV+zd;HuxH`~rGI&0p1&szCc*XjmdW~mIfJ-)_xTi`=y$L@R*FYe!m ztj-X#%Td@?`{X0P(&4AbyDRl)^?-V2mG-N#02w`ueFgZf|~~U{p6UMS#w-kJY^H{WMr9a zkNsIOc`6>Y@n%ZH5~UYeA26qc@bu%T@`*3R5@xR+4q+o3^PMfb} zWiY^CZqw@uHtqLE*410l(^XlCXnnZSG4b9}RU&Ehyx;aD#(BGQvP0PTr57Oc$u1sI zSlMvPSrq%cJs_;ST@P)cz{mWS$j5#`WvMJ4T9|@PW|R7;S(>-Zbq6XwII|)Iy2&3k z9@UawWZgdmzOryCnCoEK=O*nx{Dj!{uO^-l9BKtV^PgxGLZvV#FJCp^2xsOb?dNoN zJzw-50otE^XU?vYIyQU4ZM`lyArKb#?~AC{^SrsMoA9p>J8}Ux3S?A4LdfKW|H$x? z0y!z5!#F`bCHzzk+4^X9mFGX_@1}l9^Vo>`)N4`}-)`>7vm0%?A1Qp9I+-i(;-P4RT$V7vWtu z!)0B{qEClqEPjui0k|0vRUaevc&^RFtw;euSqL<9*EdK3snC=-(S~CLmhK0p;sK4} z321J#&k;R=iD%n~t$+Vmt?t9fJE$l2f>`RXBeCD<@`ebFT3D&W$M>i6<5ibOgNd{q zP1-P?+9=(!2FSf_Md+;ORefjqqT1t7rqT~brS*|}@~sWv%)e<(x%#53cx5hi7q+Bg z#Ah6sM}DVUt*%a4+AS`{+U{nG(K1bNV%u;VS|>prF?OR_#`bAw9iBn+a0D$)9GGd> zxy)CFLr6)Egp6A2e^GH$)$5_707IN*O z>Y!D$>Wn_Jd7DD`DOTNl5^wgn!LG4nMh%G@yUmoQ*sTjfd|Sq|*=8#lrHsa})Fvbr z|6|vNxngb$_X`HOoGvD6+j^XztG*zPGe9n|>;=Oo+M`tp%FOg!dMwGdacyS3!(>1v zpZCjJ_?uuS4p@xs=K8f1vL`JV9_-8=vhlnXIiZ=j1Y>^W^i%pcCD>#!C8w(@PuNYJ zBca0Dj+ijetV4(<-vV?ie8niy zce;jXKcLse4JC0p#E&tTZDS?!pD8wfh4rtO+Jx{_m2bKI(%KmOff~KX?)s^pG4Y~p z$PyI<8`m)YCB`{+iC_gEkE(v_diPTf+#RHImt}&1%w33%PN+8Ll}kmOZ?OOI-4xB8 zZm}+kl}iRvoJ;5!`@E9mj5=%;;q9bZi*g``X%SZR?_A_q8_4=qJ}@-82JD~0Re723 zHPp<2%f?0ZjQa2q;BeY>L+n=9p~YF|m+6iuvV9!u|6D>XO31;7*U3@LE-?mUE2v7Y z6+Uo>Yhtd)2E7j%9}jO&kLbKi|Hzt}N2JDgG*Gu%=7GB5?H%s*+V z!0e+69K3#TDGu=&_r-Zn1o~q!_w*Yew8^S3KUdntg=9ea@WayfGX~OiQwe+jUWt>l z`j^m@b|Ll+%NEg3Bxien!cRMFc0bQj*Y% zhb3O?XxN!Q)ZNC-*d5;9?wdd~TWTvZ1*d2xQ!y78qmLUCYoe8wm^ZOnb63t%x=H-w zlI(}7UzoaXyDgr-rh(iBbhqzz#NoT~`W8Nbg#}m>Vy{qqAavWbM*q6J(5}nZmmW)N z!viECPQ0UYdB-)!RD8Ib}L^641zJ4zEk zWSBhffcAAZ;eu#7^z??8u#w?gXp1tb`ox%sD)`F17j&Z3#Ha#59QkrL=RnT-k=^g7 z&7*qq4f1HYn)oO(i+RH>qPBDJL5CjSnyMl;zrV+_65&4JpLe$bJi-+n;zH;ai!T|| zu}!gmyog3!a+}gibDLxk7B`~J-Ha8O$XNLl;*N8_@GoVm-WT`ecT668m95kdu%fnm z)_)Ps$vj@-BA9Lmpwyn0I>Zf!Xxsr`T9BAeAoFj}^Xjf679SYI;)#az@cu;)OO>*+ 
zv5kuT=?lL-T^U_ky3yTP{Td_L$HcIcAhac|q7v)-k@bIp2}vDVu*Rl?w!w|zM^KTUL08{V zk4H1AC*D}b8ISGDr7}hF2#Byx$-ZB+OQj&53~d3C0T?2D=}hXh0j|vaua^AQ!NEd3 zJ)+75<)?eS$B8ofFsUK7RlEP_-)wNoS#~)-`&1TL*1_ zJ$Sp~CG1aHu#B@7-kL_ic;p>+n{iLnxc1?k=M`bDN+jcN;k%zw@BSYAnRojY1bTY1 z+!zBdLWs@%FO1$gR9k)@rlRllDSnw2(t>r)!seff_IEo6hwicl6wcEA;d%j#prEj! z(O|_(V;9;_vk8x`94k zC*C|liz`TFuD6dDSAjh+XSk1TN$yhbjv3jY$$qw~aOR`!%??ymUq$l-2LO~1VHW9u zTW=KBJkE%VohXUc>@jRimH436$Gq?dUEzH3)a~m3uyvMSQU71Gr$f3MBqatIx=UIb zq@-u0LAtve6ln$o=?-b68Bn@Q8tELRB=6_@;I4Joy1ys<0nC~?@3YU|uYDyY`aB_L z`ZW94_U8#0uT$NC0K;@Mwt0;a&$Qz-eIsG zGrfV6=8{(re((x7ne`s>#=}(LZ3-WzMXlB`h-m*4PGvOK97jbiz@CA+4d)+^hDVQh z)W7wUu72OU6r5b@N_jsfq?9=1aALk2g&Dzoq3r~wSGnjf^wM0S>c-7Q!K-KNMM6k- z#Qt4S{MrMUgQ%Ei^au)p_HV)sZJnBGbD69C%=A-r*@t9Wg<1)`-T48z3gtoSSn% zv0W^;qa))jI@|K4t5PM5?iiYgSC|o-iF|}bEwieD$vAO@RlpZcYY(FEIx0$Lmo}_t z-44{~^U4ZxB4=Jw#5tBXurx^%Vp{XsYPONKXM8x!3C|NE{H?C$K=Qjf3G5?PUF7&) z1x8pNfya2aMEUxLGwb@^?q$ChBV?5V>QOK4t(BLj~!2)^);f8q5a^&?X8G zbRDb2?TNzdqC0nR-3+UV&fI71P2fL(%|px2&?9ncH zcgeHCsy*n5E-1i-G(b{SBo9c82kNzkLfX%e5{xB7G!@6|eNV`ZK=#6*{AHVuAT4<* zu_#1^{nlL<3D{$h465%v5h39B^4Oh?xufvQLW`sfcaPD>o9|S zvr?rcFJfSS4J8nR!-rTQCkYnFJDV{w@W6SKJlkr>>YJ7Y}z zIQ9LT@rQ9#lL;ki{WchDOZc^<4>RX1`N}5rty459&73I|STz^f_6ylqEz0Aq5PY?U zfb*tZlvQW|5Qoi}VelwBYg@hwzAmh0LG#@PKm`6<5j4?2y2bLutpel8@RSro_p7jH z|2d*$oR1doI3i{(FdkQH>`p!_#cox`RT1g<^rj*Qost#1)rpaz$WH(~k%7cn=4T@d zntwSV%c|PD2Em?k5Akz2_+`X#JkOdmEp1*7Xr^~n2mAyr9A2>jry-AI_2diYtr zh&#{o9LJrcG7vG?kM*3nVlj8A{V5MEN&DK-AxSe&CRWZWG*33=Ra%rYtCeXxCEjey zZu(hkM9$GndWHG`)j5brQ)uS?o48?Y-q5R@5%AP-Z`LygFesmG`zD_$6Sl`N%&B2gKuTIs1(W z-$KeX5fwZA*$~)KrIK;%qhWDjei+;b0N4if(2}cN#Zr;qO!;7`5I z?;kL=_A8+_-`vytM&;;8Y1m8GkU-oYV_FOxJ+Z~%nY=2C8D|EEh3Cxki35xxK5XKf zBn@o0)2)q2IEy#EfaMvnWwxG{R#7G^3{GOlKs8*E&p2mxsGup&70k)5q@+lutCoO( zr&#RF^2+oPY=k7tq(WewzV!PNE7RUj#G3hCp`XjNwiLWrH3Jc>n)KZibiv4!-|7c1 zQ(4>ie0WO9c?QcXh%jEGx;-A2OnC-)|AOWAYG5sQ%ib?IyeKqp4qTp)+WU8W@{k7;@~g za@a|-PDVC5rpLhiVrMivGdDpaUS($W{uG-l(-I)|^}e}r1(gLG(CZkTl0^uuo92pA 
zJ4!M|-TRh!5ONS2OgtIwZlb@Uc{R9Xyd%e864=;>`tL@u;A~l;>HBx|R~8#d-+DS} zD4ty@GoL&uDV}^On=w9UitI1QFWL&z3*jiSo`kzW`Jhi0bYuckMt=yDz* zu{tYMWm%FmFW2macI8Yg)!L*iMHyV=Kx2hQ)GSLSwR$b+qTwi58=j=2jC=_<_G97j zQce#m05Hm>Pu^#sNSEx=XJ+S4Ln!5U!Y`q=!V23@zFL3@HR;sK zW{K1;63s%2<(BH@Cr7+{z?U;jxOLWnY)FbY#bO5zTM7~yVOxW!kO!xI}r3nsM!VKx2 zq~Sep>5EX^^YuJ_BiS%Ot>x4TFEUx!3`4{5#{;>(T%pB&BEN8}@2#{R0-HF*S_5k= zB#J{l$+MuUS}I1 zz+LGkdzio&@=5!Cdg4`RT#)5CYYd8jw<7J>=Nve9pp&Z|3p`J|(HmCk1#*GNLya&J zs7Td5W)0Rts6aNMTv{YjsEO9UaQ$`Z~N?k0|0FSJz=j%C^ z?z!oxs>L>?L7_r?{{k;>pQ6ql2KhOgkM!|D15hTqD>8psg2bt2EKAAozCLN>__Bk$ zp+n@2MGvYqx_$7R4<1QTP!@TQC{5|TK=|s4-qN2xH$CBSV(=60UDaxVr0qKQ=B78t zG{bv5jXRmc5&escOQqEgw39U-3VOd2)$jkV84y3qEVlO3K%^N4g`q68aK+spKX~Po z%mH!e1T?{w_R>Su4{wC)SbK2_^is)DO)7_mYf4)$^3FjO7r~PC<7JB8^Ik#8SG4_# z4fr1=(`vi^-FO@1a23C+?0rNsAPfVM8J+mB18UEAgW6A*eKo=ZxUnBhj?JS ze-8Jp&2C{s{!KqYc!|+Dj_f)0v;@wfiv^FtrZsyc48b4dR zrd^K??znYW1Kt&FLkvf%xkfv)^2n`vTK+O@*Adq&tBN!masA6r3a3%>S^Go1Mh+(C z|5=jSSkYla*~r2Jwm%VLqaE>*e3*PG#NKSSMH$!g1}YQo|E_-a)w*X-fzN_;mv^;&jj(NvH4%4Nlp+Sm|;3 zc7W!h;Q%=Mts#$C1gj-1ZsP_$o(u{NVna&14mU+g3C1YBG#TI_V+pG8zvRh2tR z-k(dk1o-=JA50ek7gvTzHRJz}ehaV_T3_DI+qn~Ph6ANflIFJ4Io1eo6LU=inZ^6}+eI$jLxw#M zBo@zqn7i-x1Ojiy_f@q1p*PR#i~616YMP41=-EzezEk zaLCFRA<)CwgTA&P2%5kbZS`VyFb|@#w|EMb3CxL)=()#^n{>ZB< zd6udh;6s@E!q=iU2LlMxTnWBIZw92?2~3&2`&a`dVEw`a!eH#G@6d)ZA=->&ITYeL zZ@j2HA5z-DB2}GwZvwb3jf-8KYxFv&A;{xm*r8P$`#F^@s+--S_m6~@oNS{g!ksS0y%|X9l9V35soMGZ3B0pd4kxKQ9b9fD z*zA|^+37z&@2hA%Q(E{m?mNHCaC6kYCC6;RLDy_enQuZa7eDwd2%&^R{GzLcj0Y;Dm3zLMl!DY?C ziJ(W*(8p3`?$L@)DE? 
zrndO5yn5#0&7yUVZZtbpwWu zd554y&X*KvCPhwXj%LqLX4IW1EHAUyr}Kno-Dq1Mj7dAni z$bdj+4Pr!&;CtH!+2l#nKvC}E=6a-%;FGRs%|cXql=N68b#H$^+9*h)e$I&IZUtzJ zeur!Jj{2YyJDbL_cVHchW}v!W<|pzmVPz&I^MTtff`;=0sP9%igSkW_O5H{et;@&t zb{8_~4ci293i(7HOUvr;!V;yn+ihmb)3c9{kD1#cFTvf6gBFAyM~5_D4G_ghtpXDe zDsVoYj|*~dF_q;*ZtB&i=AL#f;N}W?*?{3}VZrLmN$%&BnfiU_5 z`OB~phR;OrNK@??w#Esh#( zwPKeh7fx9mzVO;1Gb*peVNDlnm5uyW^7`3!c6u6qB;>etx)bAttt#qzBPaRiR?qje zW@D%BOK==Ru&R6u65N_8-3wVF6}Zg4GmdGj^iMMsDX45Ay248W*n&#V`i0tN`b3o@ zqO1Y~I?=vVyL2GVDT@Fuk*I@hYf6*Dk0`7#tdQs&)@B~QF z5o3{?2e{tQw0XO|L3UWumsHaqrX;sbxEsVxaN$H{MIpDvFP9La64MhGy^&K!DbBulrb*uRO zA>HBD2Ahy`{>-|b6B(*$WyaE5`Mc8k$EbEz7+0BT#%3p)CDrx_vS(80&p8Vuh&NqX z5lAMzXE>)vLPS;u)Kjg32Adf?6D`La6Vt-~5I72A?32kL>)L`j=4RSXH(8 zc7f_EU0DtfFTiM8w3{FU8wH!;rclvR{4Op48{#LH1WB@4a9Ien?!TK*1hS@f>chUY z-NDGs_79cBnx}aYe_q#?fy_{QhDDp3RbtGBPFZ9@wMl)vr&T4#j^$&Y-ln7LM6`w1 z7Q*(_7u~+MQwx!yv;UY>Fy~o&CCFZx* z^dLO;#=`RQ{%eV`?)tx{*#C!0e|t_p~)#BxLAkj*KrnZ!!N$%yqA08!m+mjLF*E=Kwl=O$go31^#>seNat9*LX z^V0#ZX!zqm|H0sf*$r+=oOr)k5x=(NP^u+D24~<~d`?CEoUm?muDA zM;@9>8HV(Z?eMdoQGJSA@#um|2dB625+oHpIS?ezw{DF-Kmu>qd~Ro7g~0Da?zx_x z{=K?A>*T$7Z2iK>^++MOu=usP1RF1X*G^4is9EM2+sx-wxu=tB=;{hA71A9Fm(OuH zFzqDH6k#d6M!dQ>H3=E~bsijt`Fw%e^8GgH7C+C*4X`|I_vUVFwrlBLY)V)#`Ysl^ zCafiF76%Z%d12&35eST90S6IND)Jboa5GsZgMeYnY&FUZu_()QM@lEjesp$k{Me{2 zEf#}LG)P4=W>cobYuhLR^_s5wemb*->QP z-|B{_S?#K7+0+qtCs-?SE=7&p_G_-lF(L9=$SYPMMnh{aogPsYMFQ2opa$DGx@$*K z=rAN;P_IYUnq&z;BfYi{3x%&Z>3$Bg!WeW1z?5&Mh$kS{2OL$svVQo9X&+E_(noL_ z-udx4y%pT7gdlwpW5ddJiDuDoTy;K6cx4K`(7p7RY-W6M^Wmb`Otoc+%$YURt-36^ zT#9!0Cr7rJ&|hLSabBhRZ{n}lnpDMOFZgBCQW*OsuL#eqW7(feu)G%)%d3RH+FPV` z;`f^h683pTO4&*JU<@9`dd(|H(pl{H3B}J7{cfX_({%8ZHXFRB2KH9MKA>-E{pBlX zz#M0Q>h77U(& z(kZg08ZKG4*B{Y|QO4^T=V%d6{(@(K>4%6(ikx}EP@a7xVq*U*+XUHpS%QX~{}Q=F zXy*bqys)^WX>K6cn?%{s$|6x1AAY=ZV53L71_{OVq1?{yG}taGd-(JxX8d+9O67Z# zEg603j@G$Z#3FvccGRP$Z(s~wvQzy~v?Z7C5;Y{fga?#y)HfGd*g&M7=vh!!@`COv zYHIpz6i_b^q;X(h)u$4IePTEpblp0*XGh~yu*i3}zS!3*y7wu?^){{yv2VMBShNj7 
zY?094j>>EL-oy%ZFF7aq8$uI*h9c_Wx~4%peu-C}Kl4#_A0|9E@6Qznb<8krnTIOib4p}YMgaJS zwD5QM39s17tb2VkaNSq9?VrXPBX26nb3(F!N<4c>}?mG)_^H8h}hF#4<_C?zFT z5vDtDM@N7na9;(Ev{(3>+s&G$PUnMHVT_m4sO>(!v{IKJyZyUp2L2pVQw{y5l!!f) zP9cC6f02fH!W6acsS@!$;|XYCucKxYU|sHbF;A}QbH;5XpnUFU=JP)2q@(NV1=fPn zdemo9IV(j4o>o}(7;TG@mFkRDuH z>VawM;MP&_uDDP-+k{b92e1>TiY29ZtZjQ@lZ%v)7BU)Fnv8Lj1Cs8D9BW({xHDGm zeL>rAqYm|Kc8QEA&Lh1q=CP$?#DX^DMZ(QUWIx;4RVQB4H%|iH0uplDzu*<+?KW=4 z{`g0YhyJSEa5B460-=*e>w}SkwicLK1ui$0-H5K7zys;W&t@&AJDGIfVd2?%jm9WP zjlMl9LK-(8`#@!38D%#ax{|=>ZoP~`?(t$T{xvqKp?2=bYmA}8?KU(!R(39A2l0uC zU+%U0qe7un-EZoREAL0AjP}qbG99qLjb`Re%nN4TjYv9hiRFoPd1DWlAyAS1>Fmrx z-y?_lam-uuOq3x;-O)|S!Qv@v@L{`$+**5#UF*4gvw0M9oETx?Ez$eHj1~(t_eIfIA@<3t|D43a%-)RsI9 ze{ypW;1`bU`^0#8eFMDBe%hA@V7dYrPO}K;0sY=xAB^}NMxZKgjO`$>^n;>%0ph)! z3q!@1yL#U|Mt%w4BsghD4mu+S6u}b(n;8BCv(f#SS8kOdq>8q6+keB~e1BFGUw^)n z_-+pU1p$BtHv#dzchMp^@#ON0+xJ@PGMqoI+mq+p+uc$JH-CUjY#BLk+7W40 zg_YA4C+x+Q;@P_M?Z2y5M43;|U{GC1oW^8OT-K`|g z=%#R#?}$E%*p(-w@P&2LZkBPK2{_V0ICuaNF@XRsowoXWIFTD zbq2?ihfTX3TZ8+=Z<%?w~1ghYdI_Y{cAGyr=Ps3yNS%X&IuluDZF#3 zX5zYaMc4K)5ey4HMrurU8m!`xZc`hk_c`nU)%RdkjvRMdRTZGR2WfFOt%aysI~Shig)z=vrJ7lZt_o zepUaWR?M5HaVS_w`Q+XEn89`fEM4J3;T)sdCZuQ-ay8kkJ}~NkMF?4D!?4nv!p7Rh zMpp_CaR+EgJ5%}<|1P>w?RV|AM8J6iMd|_UQpS)d=V+d`;eG~!<;b1GUSCNfhA8GZ zdQn1`gj1r?PY1T4Uh6R3cQ~I+X|hOgBW6 z#szIZ4DWjyD5G7aT+NWz{>Gwq$x9&;#^5e%r|z>oh4>otDuCr3wCHp!#D-Gn29@R} z&}hOl8T;k6#-#X`h)s$NA%+i}C$W4*&=r*nLwQLK4(*tX9M97P!lNdYIRkTsg19^) z|2Bzr%9{`1fydl2SJ9tXm^)XlDeB6^mxx|}X2~{H1_Ec1UV(dVM47Vb1wwEAcI{KM zuS1L&{hEE)tLy6CznHWaxA^`9)!!6MLs=!AG%TUkvFYSWV4)e-!+{^NnBvrxQ`%-& z}GKM}4rM7l0E79H?)*IDsAPu$lLfp7#u3S$cwO<0O zI_WF_r1X9<0QX=CLN)vum&@^MFQ=OWC7B<~+H$NuqmWY~jcUSO z;$I8W5FFMbra#APV`=)cSHBl}2xIC3@}{9VZw-FzlqKYJUFrX0(fkWzqx}JEc?W!I z6$R*m8S@0>6)kH2a{izX@|1W>E;7-7?rU*H(vx{-OCe-IQw0`L6#ca9H*q>B)L3LR z#;Ug}f*f%ueet;E2z@z5*j5TF6fG0Vw3PT_=uV-Azu$;L^@GPcbmI8%l6s1E)YvnK 
z%$UhpP$HCya&bmje!u^_`tQB>Ok4t}6&ycR7LY^s(hmu!hhl1YZnu2?UH5?SLOhn;c^qQ^YP+8F<{Y=U>7=hNpjY6leJ0JmY^5A)bTeujvFrw4^wy^u~ZZ&N)(oDy~lq)g}K$dLKj# zmmD8L#gdT#00l=@GpzVjQQ*XZ^iXZd{=-@^ysMX`)*NN56b)!$?N1jPh-=F6fR+#a zkoGEHhy!WS4h)jR2)6g6EMoB{(uyNpXe6EIfA#10Z^|K6=ADPNOK`{C{q*v$go$qH z88A>1QRJ9~T#FXpIpk8BYNI_CYJ{J(dmIQmCcK$If~w+vq+%IegM&Urzw3oyQ+mW7 zA46HhwHDFrQ zie8V~vOjFmGzDFu*GfNxwtD@Fi2W z_{Zh_{gPl%@*jFXgSGYpdTRbZT%Vq6pNF#qhEE1r5hF+Ha|`5KS_~m!kx~TA?xS6? ztAS{Jsy=-#Yv#U+r=eT}kPT4)3o%GNsRG<&`>AhStKIR)>`RTg%pxqU&hb#WlnqZa zY=8e+2cg15{beJ5A=BsU72H|$so#b+dGL2Rt_4r(L@sp=zb^n6?p5^q{+TU{xCMoJVB^urZI^Y5O<;#I<1rvNSOr4(#k`V;1o&bdD!N zH~mr>!J>_lNGWYfn+-p}tDO_5_t2Aip+Vxf?mLx>dBti9c|DC!Lt?304P|U!yS&g6 z@w^wpQ);MDV1k6(wGl5xoY7oY9Dud+-vQ%74A7ItxM~aldXHi~OISIEH1MWt9b&Jh z>`jB~_AjVVo`79u;?mPLLd!j=-(ZFW?BOw)AEY5VYqsaN_#Z&_Ug{ z33&J}C5zBJY~dKYm(w9YG0q&(?$8deg5bE+x(SU*xKIPH5_#t0PJlk0BX?mlK^K70C0&0+7}zO zRHEuRux`0j7qxm{$QySu0xk84ws>PR)QGPb3O+6jwJkm~cLhru5~AVen*aE~k!p*b zZZ%~VC&x6!nBG9X#D8h_xUa72?U(Q~fv$X5hJXD%aYtPf#Ob2rHi}k$r@LD-FyA=Y zODn`nneu@lF5%9DGr1@5bKE72>HTj~CT=cX>D$|L-^cNdtpoWK$ggX&*u>fDxi42*gi+)QMcVL`f@qU-WV1H?103Maiyx z`F|-WKrgN}fn~y?AJRP2O5YbUiP6kcp_fnc0{dM9QUz}8F*UM3*%0wxfLHAGE${Kzk#mAy@{T_*hcf zjh(n$!3FCC=t@bfmSe8jW<#d;nO4s7USY0?5l6OBy8e@EGQ#YMpML>5nwYBQ5iHwV z(eEd}rTX8ZcJ~0})MY-6eUdYq=*77*Q5lVsox6e67&$k2{lYeSjKfrqtcl@@5~?S6 z@dNkhZQReS*qHnqYT^?WWvsS*@}9NR>h~2I+^743b6@vdHIfMjwskws{g$bIjQ)IW zV1S{kr}t64ju&;OKQISp4ab}GOF0)4+0OeQ>b@#X+3?c;k<0N`Z0-w zds%cjCrQBhpXh8(%->gU4jEX1iqhEU=Li{Pn7G%Ao`*jPD#|tGR{wu(>8%Tuz{Y2- z7k|g++Ai}dfKTAV_6KCIU24Xkd3`55Hn*c*v9@Ko`KodNFJsIrsr^)42Af>qqfHTk zM7(0sHt?J&=hH|n%OO9rYG=;MfaKnc!hf+KhYW+cJt$H%k|DlA#_gmhO5v3>+1!ooBU$B@B?T_qfx^2sLk& zRbo7Rw1`jXc}5O-Iyt|+aADXgB3OrKhTJ1@dSM0_cR<-qh>s{@!I7_`kWm&f z7miLTAlO@kT7lI8 z4{^F0*gzB$(oRyAa~;v1_X^igOm1`);~jYm*!+}D7Qr7NZgY$(K=o3%0TJEM%(C9I z(noQ2y&l`*(Z^%LC3)IF`^CjozVDH*JiuQd$6_gZ!O(mAwyM+hp`-?wrWj3KZS=P0 zYl#UiXM?*Nr+vFV*PACM5<0$u*e-tmwn9tku0busj8X)WPQO!LC7~dS-N5uBFNUB% 
zJ?T=Yo=yPth6$6U!ie$BGK$rR8WI06DG?5L{f9U*CW`EVpk(3dbPB8V8Odt4Wp?h! z&Q!Mkw}}l_Pj61%K~nP6Xg`X&u!g5=Y*iPdai}`tt9j+76#8(O;HGuW3yT0e zw04JqNn<=|`~;#O%~3&D^%I6b0cq}%R~(v!89BCM2hZy^ftpSSrj^myA;H&KRrukw z^IKj1jId1Z&{=nVqgZLZ!l6>X!L(=<_i* zP5isjKEW7EL!n!V%KqUG$PXtJ;l~{3iSH7|3rC3#E0RQIq@0q3hx^7Cd@akQ@L95~ zJnAvFyC|4utdaZaC>S&LdH_1(ukfn|-Zu=&#F^XcS8SrN;*S}<#(&6AoGBXFa1Cmd?Sk_?c0m@MUpsn@{4iI`G#m{HN|=Vp^8gl6;CQM zRKPUoIQ?0**OweHA@^|{%Kf;*128-5zxpm*R2TMJp1ui*BkYw&^*8X z(?$Z$C#}D24dYwn%@7nG!vFm%j87u5Js&Iuv8R5<2k$)sg*5CoandN1qp}UAdb5<- zSqd9JDE*_Or?m*SpSFlR-8EwK%`a!<+Y>TyBk$YU1~;+pV#@KFyQZd9JCe_WHHrA+ zyPu>J6LBGl^miwmdlpfuJ-9UR!6m}zfLhCfN5LSh!_Vdc{%d!40T*Xl@YpActAr3W zSiksL1utUIB6x_P!cS$Ahp<#vNLzGC6kGHEJuCyc{Q?2II`p3mtW&5Gb&D<;EcoA-?$p z8z3eHyyGPUHZiT@|8E*;>DTZ*qcr73^~KQ-P`{khyOF@?G@J`ukk;Z&i}ES{wD!H3AmwV*!A1{lJPI^0+M!c+vOe0&cM} zAaeeQEz|XfHXi?cxZ($)r%7F3Rx*C+-sJGysu-n?p5i_=m+8{S*cQVLg}sg7UWTWf zaLlJ$9O3Q@Za}I?)OBhUzZmlIeVl#!kfs+sa`fWu{&VD?#7UiuXSzRsy8BWpHg=Xb zx?*nE2pD}I*Dew^P;Y+JxZ%`l@q($L)rXf8)yI$f-!-l{7`i9r(Fu#9C*hCUYCwuiy zN=Rw{^Zn{d+S=8qHsct;($z@k%==-m!ccgpKey9$H!rB|O~=EvCU9*P2wtIz8?G^J z2>66|T`9g4L4#BgIDVAWGeB0OOdfn|+@9AI4dcbu^KJrz@i zQ7<%~XMvqJGJVFh+8~G5bg1?XHI@OtLO;xUsk@h2f@0YUAnKzCcXAwBrl)cO0H(O6 z8UHsU{Z`dggU9Zw|1}f}fd4b9CF)!EE?JU;&EM>sDedph0dV<6&Z>S*W4h##m5VRX z@GNw$k8Qqq!WFgf4|KMxJ>ry;Xe^uXHU&v?io{1N;U!|HxbZ%6BL>S@pt5_uI>n16 z8h&iIAs)i~DPv_)ldjvF8q0k;vIpo_gXt5U6^8jbaNYTOtU-hnz%1UM9Fbq=Q7{cE z;(f}Wjf;ZOunICG?zWW%vadvWDD|%7uBXXo{lB#Y}&THuNOrTkn${Gi|cSP(*IVR+xrOn@!mKBhaL&C_$P!54utmRA)n#)2`FbM)7$pcPFi zntEzM7Y@n(Yor{+2f}^3)v$o%#upYV{V;+H?!GU?L`=wA-JVpa1m3R*KU|&#sr)G8T*HBU5Gx-ze<<=JTH2{+8QdV z-TBdLWgiiL`yf|4Bgrw=7z7~((7ad^@nb*;G~lGvvc{R5Sfy7c-1IZ|ddk>Cx- z+uKFRWuYxhdKK5pZ1C1qW=;HQx>xXus{g#3OYZkG26%7#$)^h;CH=&MT49X-V}wiT zgB8c0_>qqqqv|U*yW|K)r`cHSGK!Nc~AYz=cYBuI7c5hUPmSEkRrS~p# zChG+J0mP}O=f^?Kg)?mgAEB5qWHcVJi*7qX%LuJTb3s=j!#DexG@5MUz0S`LIHAbY zu#4YJnEZ4b^NMEunb(3_SVi?awe3U0YKh5^P=c!IhS3|7@x!m}DPw*hOWp`m4sK5k 
zzhugUUqFFMAplG?&pe_Q7~8uWHimpSk==V*O+KbrG$f128#evY27YC1`uTynU4tsA z&(*Z;6FPYBcw2A^NY}gLW+u!n%>+7*P;T;jPJZ?3_VF~sty2|ZrLqJ3N*)^dDtKll z8A_)!I))j^cYg#0XT%rDrKbVcU9i1QM!7yLFE1|qLGt(S_l#29WEB-ZRn#=(3=G8o zxcl+6+f{JL;w(zl+E}U|vzWKclNDC*=bG<>n&$Qjni+AN$BjkTVQ~V(e_d;v32k%# zx*-(AqA@$(hmiaa%EW)xCImzDNr3yr*pCnE6$m-?-Kz0klxel>$ZN$*Iw=ruhUl zspapd(8Qz5&_R25gYK4NY70&(pv+Z1NgBcguVtVuw?7IrJLaZKVRq*JE$&lS&SB10M@!P{@5ybSc%r?R&#cEV-gYoWX9pTdfMXc|oo=)p++|6f% zpF877m809%gT1Gzbd;Xgzw)sM4D_ECo0%cO@vYMS1QN;82_KU%w}2VABtnW;l)q#w-2r({VsfT>#On z1?bW?KSm#z&j*|C#FaF4txjn5LU<)!(f;usHZ*~12Zk`cIU|riB{_EeF9v(HCk+eq z64C5#iD|jnMmv>2jCjL-cfjkMhof4rH)nnYxE+e3iDPl$~?WVAoo zyF{pCe!{;HUiSz%o2sx4UD{&EAwgcBHtSxI?$;!_iU6=CUZt#WoJ?z!^i?z6tXLk4 zvQ2f2!qGLoLxeaiciBuM8U# z$|IIaxc=|bq=JE?F)C0#tMWovIWeUhX~^pA-Gz@4=!p3ns-H(JtMHVUVq4X;5f0%_ z35NF_{n}or{IQ{>Z6r?%hNG3*N4Az2>dpMfUC|Efq_hv>(}c<*Jn-pp)1IZbZHYI- zb`a%q@fI|mKKi48mn(#w{Xz`_rgZlZPqz&vJ<0F$xCB#we)tOjF#UlAEVW2Kliv9W z(w3^s{`D2&YvIv`)qcu5f9k1uotWqx&b`Up1Tl}dXoBWV+UP6Pf32L#F>nN&DUT{% zL$RHY%loa#x_G*|T%TmCF8K@ zj)HB-B(e-ADH#*2iK zhTZ)%Sw5KqO5Bae0rx4c4f52!O$kQ>2ICwD>}WNDyX9VANVuNf?7%0X2@av+5?3-D zA3hf~uY}<;%JrF|D`6uLg+$z|L)vdve8)23aNH=Mp?p{s7VrNa@}ld6{p?tNa+44^ zE4h{w(Ph?|_sw5>U(P&zYT)zFN+}BQHR|1wGfx$@zG^;RL%D0bi@xI%5|R-O z&Hjh-v_~O8$wN^uDpiL?#b4$YtXe|2$eK|%dBSBw;nr@KIZ)76u8y?-5Kc_GPw}<% zB}3UL&eXC|bax9UZeHcUk8BKJy7nLZvnl-KF@7LroZ=jZtzG!+G0Av%c+l@XPmSh( zYpJI$nS9oXStt2GDjj@EaQm-_SNp+?lUfg0O{2bL$+;yHmKE1({FJwL2cRr~$vd~+ z=#8S8-2HBS)*9RkKs#klJv~4xegG@db2~KX|T9PQccyo z6!H#>VJ8`)8OtLS0e5aSb`j0DGrM&Mh(H(&zjX}h?t zC8U$MjwKyk*(gi=>=}#PHmbd%^5O8WbYCKp1rDrRKj_3~p_Vn;Abh`0T;TpF`8T~< zNj1`#keRIZ_jl8BYDAVJ@#rxPfSQ=pC~n{Tt9`!`AGj56V<;dG{kNoR(cWnsoOD8+ z@scmXN~fv~HaYrNgM7H{$u&rk?TAZ2fu>et)hL_Gw7X_r%&vBV+yUZtba-Tn?2%g@ zq-R~)WM(GoF-lf1%Ed*}-oE@}a#GR2Mto*g_oLFtn2Nln@S{rNTyly{btn%{QwAjKX`1m->OVtq0J==X zyLa!VmX>0z{7X6hM@ZI!M&ux*@%+c~djY=KiUw)*ApDO3YYMs6^uvcmmP48Fp+C@7O~5!P z^*lJ2cDwEuD+m_m78I1D{xUd4DUm!JdGG59As$$@34GB7bki_-Y5%&`nZdHRdvQ9z 
zVxB3M5Opy3`r87lXrMsQC15WcmCkOPqzmIS8>#7#8!mP8v%@V5z;JEs!eLDsSNS=r z>|h{0GyCn+zrVaH!RJ~kAt&rPH>dqq1+uu@Lh##u^sIaV%+F1?n<3Um+5wT zBnFc(PWapmFXtH>T-*qbVxH^@uRm4Z?EJtW`L$($B$0K~0 zzaLKXTD*tLNpJh2PpbFkZ34}wU@f$KA8$CmkfYOQnR{mLx#w3hnfyuSd7riS z+H0?+Um&oIkPvZ%rhH+iG|sdg$upUk2_TUew~VHzA-aBcV8CPoH!@e@MtigaB=ieJ zIi=-hjk@=QUU7l|R?i`^jm;Q8Qt25-{=@XW<`a$)k1KAh{X!aHtW6>Bne9dIIpx={ z;KRoJg>dkRtIAn}9X9J<(tbC{U%m`}9pP z$!QZJ%p@4a;PZLQCyfd>FYtlNdG$gia^kz!l-RPuN>xp0<+&L7QJvbP>ZcYVNH7V7 z9?Crq97EE_Gtcan#DzAfkacArG$49tmhozu3@gm1jj){FlOu8lh%^2sKZqno2a-mD z{h!^^PcS>{7UW<#c@mn3NA;F}`1?y;DqkS#=JcOk*WU)vFfmi)m{UQHZ!qS@{d9xm zIC1i^cYmp@JM<)Gt5tTKx`Xp?gJirbO?~f^+)UJq5=>Iw-L;1c^?uvn-?&uFA13-& zU-%s+uQ%V!vsA~j6wAyrR3zRv9zK`oi!`7A=$15aAn5hFgr-^Tv+~#9{DD${*n2Y{ zsd+$b)4k>=Q+r&_CxT9{HOe91IRH6|WIpzv99J!9s;bAsKR$4LvaEjMXdVIK?f?d|is47}HWx*EzsE3D9nm&+Q$RGb_f10Q;bzqJk|D!ku&S@!;ha38eS=)-`5!RjSuB+E%>!r9}Mq2W}dCvJ%dI72$nzSlp8^|>I#T&j+6Hhi zqLmsp_~m@m3733C%qSVo^%PGl=e(y9om;*y5L+mUj!+PyO6%<8dr-?r2}2Tpv@Z%Y zJiaW~sMT(=J=4(-Hu9#wL$4m(4l@sWPgK7-paim(pOYr&6gdWBzwT&$AD;AG zxY7ZivuRh*)9ZQ5P_;{@andt54PC!rtJwRt_LiDg$a3J|aad)V>QMnm8hS|dDfHM? 
z?h(rWkZr();i>czhfs@Ojd1_#vU1rDEqi4O`xoq}@|?k~VH&jfZMq<<4U0DGhjGMm z1Y{Eps$#-qB#0m>Yts)0I`+GY2GM;+;`AkWe@^$_63`dA58?~PzC22t!N0uhva@_M zi`E?5mN{bFiqGPooegczt1kB(2zC1YeR;PPEqS(Lt)TQUr%2=on&Yg~{hkd)>vLWB zNWN_f2`Of>FAq@ygebqkYOBdQ!icJLEzL!`3UAw{6v#9?;?n7Lk!Z8#M@m1QI; z`ocageR``+nHz@^NIMRp>dC7 zIRQpmycCYufH^MGbR#$w2CgXvJQw|zcJrr^d3xeqn2{|kWV&z&^&q}=D7P&(|6MF; z^F`)(UjrCxhBynIvU;tn;;x3fQcv7&jdl*!p{Wr>rB7vZSydDl%$gq%LhIUyvy!q< z%31_wwN1a2U227U<8G75Fe;H;(I!rzet(u3uhJJH>q=N?VvPWPO5D-AGqOh~b zm&*mqan*>*pA!jwHLhY3w5lNw2tcr4K?8Ok>K(^czsF4ate%JL+*<#B*M0xD>$iB5 z!&pvs{#Zf&zpeMIniKgM1!2YpT^oag*j&6sBhbbPW_lwtmx!S_f<|Wg9S1J2*z2tb zY72JsnlWRYk-VM69fj;Lp+OjOnIc_#jdd2j+a+u-m36Pjqt3q$+D< ze)81pU4hqxNceiQm8Sg4&uXa_b|xn*G*`kzrK_7oK=Z zBmIW~vevsvqE^&V9FKs&UxR!IP7pCaf63f1d14YN+EtT>a&fdUU}6%!pQE{4-x`TW=2_DI z2lIz;7OLq)k7FbH=rq3tvq1Fq03R2nzd}ksSpt+%U&%QCIh(X!hcgTdCFZqBgiAE-uf5aq1XJc`@>wcD)!fa}e1;20WZd@ULb z7f72nkzTJGNAOvsMw|vTCa{YcU!9srM(0P!Ab(e?&rQgD@3W^V0OU$mwLVvE6tO+JmP{sO1D zQ&rZ{hBc<7F2;|^>Z;>VwWdpp`De_iq?AZIR3{B7>zD&(8;2<6H2`w~{W5m1J4I&6 zmR3|6xaXM;p77c>K0X`et!&7W*bX|dv0ZziifsyM&tg}${MMQ)HJv!N_bHs5PE+`E zq^W{B{J;~LNwT(6gzG6wgDvaKxvaw`x=alJ3mj3QJHOL_c!WuB<0SNNIA^+fPyq-) zLmN+hMlU4#wDqX8uf$Y}O4);tzO5^okYH*T3oXJ=924+EC1NZP<7RHUmZZr9^gk~p zRNs7{p~W?~t%y~qXgnu`8aqy+0|rR98+@$kZDQKG+uc6N$kx%m8Uyyt+BeTPPSVy3 zGR7u0sYCo)F|FWsF=Irdjm~uxRt2@OW9NTQe_(NTHV$Let0(j?0$JSPeK$L~cK4$n zKm48QSFZ3A9Xq|*AejHQNc(Pf0X4SO9BA9erK+17rmyE3C~Y>d@Ln$O@N=6RuUX&n zD8@~D&aY|x@Gq%y)LUP2b+sLv!uR3|v)V_;cP4J+{ z^W?T1My6jA6X8ihno28iq&(iX;w@sJMqvRKcPCfZor4RV3#YQ~%U55Un%lbLD_`!F z_q;kd*;WkorVnVL0(xvKw!~ygY^GZeW%ZQXKCCwVj2`$IZ*x|1ztgaD@XX@Fn5qkH zd5#0eX>MlXe3>t40-wBi_#1Cx-ZBUF0(UCrLF~kA5dvx=4@li#x3V&ar<~k0|Ej;Y zW@bldphE;YQ)6eDqMdq7xOREU%Y?RsP(q{$BNP6$)K@+}r(1oT_?LeMHqWSRi)zug z=Fi9>2p|8m-sxd%e=0Zf%{>^RX?s1xgvsl4Y(?tf7*FaR(emMTOR~(G85fxz-@azJ zK@Jd#d$|)Py#wKhMWi}cJrdB8MM9LY0Numf7^r8Y2}TRSn?N3$ys@Ngx{MF0QN$KH zBZ#9;f?}{U)c5v+&z8Oa4peCaQUQZp=)J8IU;Pg!-`Y3xV$qCUR*EJ0Qb(vfnuYzQ1PN<9bY^7O$57S_x 
z0%r@um$X}cOa?LTyQ)>YJ-)fz5~KYd*T_ts)*;3B(FP<9!vQlhF^#PtxUhz?6Pl(L z+BOYx4&FS7mdSj_PV}@M@9(OWZha@7!Kd~s_uX@FcnE7pd;7-BYiq>6^Bn^N19nEf|AE5)1<1%pT{+uF zropsuR^m%PNQ&k)G{937w>|gQm2v(C?nlN6K5tr>C1R7BM;{P_NKM+XMPng5D2{pf zNIU&U&N4ziAG=bm0lrT|rq8(V;qQU{7Ei5YXD1cyIQ8;E)2a25AZ~0%@p(fa21{3@ zlda2fQ2$K7+G#7>=MotNcCz`~!c((8Keu<{viUWv1QRmhc9fr+R8l$NPaDD5vAM0d zzou7`kwTLgj0*}su~6)Bw@!7A2%*XXONr{t{7{_$wi+2(gNa`sI+9sY+bbQd1HQMj zg3Hf@5M%S$%a5Bz(?XUJnJ?3ReJ+p2#K9pR{$Bjrl0^{p*?Qn3KVE<3p z$dF5|{qGOCT61%4qq}g%Kb;L-1k7!$-wJO$bkss0@5Ngm*KwC+l|42x&_B*wXfS7tmXZ!?N-s=u;P%{jd; zuS~WsQUU%1WEC)WYkXmgwf#LwscQ7u@g)mqX~IWlQlg^Q0IVVG(CBu==2Kc5&3n8vWe`c=>Vhv>Ph2}qtI%B0$zRWJ3ISRFyK^lQ^*brNfFN32plAw>PjHr{FFpa z=S~`1_$l(_LB}hA{Ykk2=#*I}JfG;t0 zS7ZRDbO=}L;{j93P>u`;f?dP$XBCCLsL>W16l-PSPkH_290j$ne(m%IIODf{Bmu6N zq|Lo3Kqqx(N{d~JBQ1V^WY&)DsIkjQV^pq0+g2%&Oj(Litb0b`Jlw!fY9rOcWx%vG zu&ix!8=oi~E4VDBYsG=>5%CkZ9M2z9x}G^I;}FhHk>41(I}A1l5G}Vj=Ey7u2Gmv? zPhiGT2hS+EkGye1uf~%D#~jGP` z&)QM(^?OHP_u@vPUdy%Z&soHW`I~Tzgn)wH*{dF8Hk+dQ-@VcgGWYE0;O)Gd1=i(! 
z8X$JUJEU33zw^>vS9gcP>VdNpf&>CuAVJv0Nf8(T0={RYhGCwbubuuDCyv7*z%avx zEkGB-3+juR3c=V?4?+)YOnQvearLlfMZK^2PwYNc9Y++|+GvNh7OFr9^hf<2ubL{K zHU?&|)l1!frKctX-ctd1;`)HG2xfY+tBE}H{zJCRE#AbhiA0UWv8t-i16}d4t&gXe ztmILhQ>{1uc)iPfdNjx{)1=jO^0H)JkIWF%k~u>56dwUizav)VL?TwN77qV_GR(^2YZ4j2wha zSepq-d$HnmC-?n>6PLZ&_5tUmeoP#k)*j?8XU~k=r!|Uq@uvUJrwFs=@tc^HL2bAE zjgHG|IceF-+h=X|0n@v{P6ZZ&RrvKWGz=@N7M zC~{c(yutbCZRh)GYJ&2A_IVN4Cu?88 z(6;I;UD&P!GUxZh6WTUeVfAar++YAM@@`oQ-(kd!v?gDhuTsp%_NkM##EKXIA_2`tMwcS#E<5A0xDJku4 zD`n?0BmLB@{&AQK?GQU3i1h4in&)ZwH|&^dhqLoAsB$pzq|xr%H2q;KJgd3v{M(ET z>gBea@2*se?HQ5r2c{P2tWTP`FQ>b>w}hVTX9Jt%{pM31@Lg0#48gW07AmsilybHASFwn?+T4SX6p#;6+VMQj}sL|fkzN=URS6W(xy4MStLWhHsr6_-S4 zAnotFjPSe>N%S9QLQVZ<vFzFQsy>Tmu8kxLcEq*0U~hO&FgwWd_v ziFLI3ZRW8FS7*}{i_4!6 zo}puT#6_c=%=#=B$f@izKps)dJP9eWEBG|6Kb$!9kr+N(fCgm*uE6w>lox4+c&zL} z;nPxN-|>s)WH9!-YFbcv1#L$>q8E(LD50f7pCSVyz8V64BFZq3bo$u5bV6pa@AMD& z@Ct$-&Rtn*pq3lYS$slcqlCthRrjL>A8aUHTq07We zKn5#5QN3n>bhjvM|6{aODw3{v$1w2QAW>!6x$fTCAw+<+-DYA~r^$yFa)1z)2 zsjrXXfYpBsK%l~%`1su^s6+~=5Wnlg#&Nl9P!`-w72xo%NKXexZ}ZKB>^PyIHH%89 zInk{JOH!z)jkcshBR+N718MwAEI=j(68pW{O>+2|C`#>|xGg^0>3wtWPo>trI8X0K zzn2jZA%C58mz+qHWl@E~#Gk3& zK=99F;WnXONa`-Q1f7f8tliKW8TatEw&L59D_6$TqIO&|HbvxsM)GOH27Ao{No8}) z3I?0}b1f~=0E(98=Aw#bMsaaB^9xR~*;mD%%L!I2S(eLZR;HqdGOqM3gV^#}KHt@^ zwm*{>zxAFpeoRUHtgaV8baN~7Yic2C{PJzHC-0B?-%1Ye`9z3>P}joM;IEHzdVLfC ztifsArd-~OvBeO^S~APtb*jc|af!%|sA?;a(t4n#Qa(JMzT!Han5*2L{UaZ{c-j-x z;Ivz_u736nj+eGwYPR8&$j9u|n7d6b3oSh)iv2aHZyj>{?6{S?tb}%v!*_*G{mN2P z%Q7iR*1l?ZL@mgI5f6!G_`KxVnIz$LLynxNL+-rWzp~7sJ5LT5I%y8*KKFhqa`y7^ z>Zd0XaB_8##}O#tHKCz_=YGj8KBA7g!Cjk~Vfq-X{=!oywvV8&?NF z*4o$d%&>+OTr)5)5D*4A-q>0q01z!j-yqzi^PdAS-&FWwA>Ekb##D42ys4=0c|M>G zpToTTb1>qgr)QOO_JR!-og-S530)U?q{4`rsHDZlZBcwW5`B*ESA9u*1>Izi%+po$ z#Xrsg(EzU2mR=#lU>{xmY@G8{drdrNOk;-!!_J$Ph;)yhdIwpOw~tB3v!+Qg!>+rQ zwDBiGN~ne^IQp|SUjnN4+65}-VXeEqSIB0eHI$f&Z6VBlC0zbN7n%6QIGyfGp@s*C z$ka0uKRH7q$`pFG*r@A3E}lV735l2ih>*ek27KOWu<<>m(q`Ep{hKmmlQjNnnw?Bc 
zzBT3X7dwUaY|dRXiP%B1dWQ{mR{>Ajq8kqe7gh@ucZ7Sv+wPW*v`|t_6~;%8iP86a zx7Vm-kmEQrJrk4SMVnUWvYm@d{P6G#W?6sYprD}LBl{jT^q2pe75{I7{EQ9p%jb`R zT=6H^efmus%@gbog*14`%?B2LI&`r+uf=gHpeuYY_Z-lsYd74=(O71~Xw= zdgISEOs$VYUIGE^-xV}p>p0gszb7r|xSg1sfo+eoilNOZ(6xe+g6^R*YXnjoOqOiU&Vj&NtYOE5VpKzv%MB>!^6dknB@Jt1yp?)gCW z{_crl-YrgBbWl~zXJz{f&dp4^wcFpi8zW7>IfWGIUc2m{wst+4asS+EWS%IH%%1|i z;8}gR0Vu!r2>*E~?v>w+@82y7nvlny9miazaP!z9fbm(;qt>D)E?I%-jN_`*jfj?$*k9J}~m#Yn53b615v;c)Nv_so%>vxQbgI&~N zcPvf%-L9lUC-69?6996h^Z>du%)c!4m3od$a9~VD0D9YHZxMjc+|z;;uv#vI9D%wj zw!wBdF23mpQ*K1esL#R$6&Zc(hiv0YNM>tv5$b*#on@#5lEozOClP|F&Czv^QrJt8 z8aP)aA(@E^DS8N{jdqvAJfI+i4~s=tmdu9HG0E!8ViVB}L8ABH03be5KTP{gkQh|< z)3)PCG##p`CV$93%LYSo)?9yD;mMj@ z>&^5rD+-K~eeEUIb058s?EMjx3GVoXC9sn;_$50AETV!Zu*0;ml`%Wt>IR)bBNQL{~IUFAr5VMdFq>kLB`9}Dy-T?82ov^@ zyqWaWkCC-t0BMlgUX}FI%&5Uf>;yIR=#t*&1t5nh^)3IFSRd?B{GJn7aZu`NzcDv9 zL`a!@Gry}F2M8B%fL4c|n ztg#Ef-TlModI5BAZ{JP~85`Y`&m=TZS+zwkmy8Sxv`4_dDjzzAsw-2FfIJ&mJKGeo~Ncj1j zLr-uly#Qeg)mA3NLqgrMp93*%BL=WxI5q$?-S3-Evt?mMq8|%V!w)y#q*w5)>-rPA z#5YX@cagiJGoZ5Y+h4AMF%S_=t*Y3tkybMgYSb$;9?eq2Hq;d%rd;g$p39z%y7${U3y_0HBVA`6O zmw2!c)cPtVaRg^i?i*+ZZ6BJ@H2fWH-@V3H?wPc{R~=0~(>Sb|sH9FPoy;dFVx&&| zW!OL)CGCX{h;8%1sT3+N9j|?KaDW_&jI8d1ClQ_n_)I@~Qa8H7GNo^sWx= znh54DfFi6SNGhd5Yy!J2dJ(ZPJ=wV`B1@{|hBbzrp!LUaL1Ixcb52M|P#d|GDrO;6 zax>-&h#2T^i?@{wP;ngcqH9d?Hv2_`HKff<3(FMV87h`5Ohh?Xqo6z9gsc`LgGb(2 z2rN}v&kn;utAoS9vT`Ik3ChKr<8`nFGhJI;X^9XNZGZ~`O?*h|=F&76%PcLKpMSvFm`1aJZTG)dLItRe)q^OWHosBMUv%HTTe|kxgqOt_}yum1rY$ zQ@KKHI<=v-qm&_JWz)@ru5UxgKM6YoQ3N)OENOmZmGD-KM62q5{@}XU`z|N|^F*+nxNqRkv;upUK}a2n9oNJ=x?FGdv%oOgo0jFGOPz zqW*cksq(+=PIh|MewJBmZtt+mZ*geG8yc8iU|b2Xu~!4x3iD*+I2A)+icL)hcdfXR zjJeSIHXM5slNTL+RE}t&`Q*DocBj3O(}FI_B=&RAcUfY){CjCGR`G==nU_`#{(lgG zyle>t8<&0e`5C%|!9c zh$wWTA=+Fh0-*)HT|l6Jn> z^Hr#Xt$lwuyC+UE#Z3cCG19g%?F%cGm`300G2%OymRGQ!8f( z0#)qA9I6u#fNzapu}NH47M6xlSF!DPjF9duc89hDj!_e$^SeA`ZCMcoZ?%N@WXdUWTGPLMu;AOPLLT!IGr$+c8*jehJ;pqpq$Si~pb z?@;RPo19ZdWcqQ=KEqEh$w4tk_b92FFMcMY{;vnHtmOej_C7zEBmJ628#zY9;?{&N 
zTp=XV@jJ;I)m5)`rVuyTJb~hql;&AU$jdF%>8#5;H)}bH`mv_Lr?>)Tr~u7<(|jmU-y(Abr(bbx zDWSLt&NKe+=k16e@!r4m?GzP>bOM{^ycJ}V1zm?W(k$OgL2Y<+4~d4zJfk;qSumv@ zDtHx^)+0|=^M5{c^@M9FvJ4jTCU2bAC)@q&W@}|gWk#B>J~BFn-cW;f6jZ18R6fY~ zkVAJ!Id+y>PNg{L?-^ZwVxU4h(x>~!l2`8M9|w!BPa4374>ZpkF<{*5C+C=rWlnj6 zavSy&cW%*z13<1!YBXfGUx&JlW7hFnr1pY|6ZcvN8o>0Dy)0eW33%K#AajSxm&aB@ z{`8>U`{cp(6Q93@N%lkJ1CX;ovlH7L^^%@HHSQ;arAVIX1}1HLOLq5}@_Vr-fJ^J!0^t zGtx0t5E`m1j>@-oHfF4&1SWAh1%;SiXa6V>)%OMA1kq7RACB4nsPO$6 zid?@s(c6HLDGaW#1s zw;DhTaUA$sHdz=V<#Duz#zYsPIK#OgDaEl9qH_kYebxAiojd8tD>VpC!PDNSxk{(f z;bSkKN@dChiKdoH0d@6~)V<}*b>nc+bnwn2 z%r%{zy2!8sO)NMop{zB>>BTt~h-8n_%QMSM&wC;BZrIyPwM-F`|02s*B3B0jPF4|n zQPGU8k`|KAPvqla9vHNVo&nnL1B zNXm0ipDD#NS}lx09S(v9cn6Go%0M*E7StA9Ep$fy^!+Q!UidNsJ7Gz!p|vRyz4QK7 zb^-cM_Oz@AmTyibtB2Jc`SSyMSbeXZ8U~*h*iVZ>h@l~#mAE0O=|8l@25 zxSq07%7zH}zl4Z?;IgW76jt&(p)&USTU=32MWQ37AbTzmp#E649J{(L>j^27Z$b@%l;rPcju0AT)|Iu`X~6gL{tUy~K~E@>pA zrWev$-UX?u)7b>gdytTs7tgV5zG!m3& zn0(+97^>{C9E?B0qiFErTy_LRs{FFh{!1LaWy}U&Z0$?<{-ciy#oPs&?IX%;J_Z(U z#v4$nZB*BE*{J7i#u=pHix6?7$RTH+r18!V_#|-C>Y;aG^#!=PrCB7CJ^SxHt^3CG zW=ot-v^PD7R3{4Bo`rhS{2;4WpOA&6JYEK;cXv?F{kBeAI)UXO@Uqs&G0_O_ATSI$9GmZo5J2y|Zcn6_ZME)hr;64lDs_b#3`xYET3tbGj8GZ&w&#In{f ziTbx|v&Zc7DS;mGCV_?m7c62;qD5B)bhi19n77h$x8U0hSIH3N47^w25NSXt_eVe#h6qu97)1UX<{S%B_gCt6(8dA~0 z?*JORm{pD{#H7Vzu=^*b!Xy8wz&65UN~eLtg6zP7X+8R1X#;-(iz3<`#ev>-3d^YT z%P(S2p(bRrecIWin-&kzq4`NTYH7#THHst?kI~i-&iyJXn^VMz%i%VM{nr>VtI^oH znSQ8+(Xvwt>MjZi1EvI6W5;lS65ou~l5gMMlGo<5crQ#at!a{q;bCjggy~ zbK;6{($J9X^4a}T@v_0)R&IgRLyWA#(tfn?D&fG@iSU51wdVjLEf%`>SL6DA8pQm# z?`+d~F+3*~@P{5U&Z2&}aFud>fHxd3lk}&Ft;AsuaLG{efNrh{@IR>jO8<>r7&8cY zDHH!#5gLWilTG4YLC=AgBTC^RZ>OYB&x=!}S{~Kn9+}?MKGELXEzf+iu2f?H*b{lr zNNbd6i`CTS=BO0OVNe(~AN-ijg?ZW(l8P!=xs*Et9ky-)j+yD-NKUiLdAMy?i}Bag zp27F}BHvZn)YJtJPU$PY4WvuvKi=rO&utAo!DU_TiAKJ?+$wD(_4Aw2&Tw+~cuv*< zGUDG)8YlH_V)Av>;&iEDE$V7xMQdPoi_&>5-FFX;RP`JM%=X!evyYbM^`jRThwiS` zvOGB|8;BQh?1x7M)LnGhAx1u@j=?N1_GH7*R|E*tUCgn!YYQO6!H514q`p^My;a?0s)ZpddO`Wc2nTMKQ~2X*y}BZs 
z&}YM|?O6t_$jjD2j8LQ$j!&CrE)_Qi5E_9YW=d3fuzDf}z%SHTej?SH<^<3TH|2~7 zkEFE!wU>cs!ALQVE8>V4dsN2cDia4S-KyxT?4^DwYQ={28ZW6JxSS|jvQYyfN-!QvjF zg|1o96hjyYb0c%77s5DT3!=)vKO{$QYZ*`bT6oi3HJl$h?r1#^)R#J`{0jik!e(D~ zQI@n}>uWPIAF2s=TwboeCv}Zo1G>7ZZP>ssK029xd32Vyz2B6}Ou?s?Bqu*URC0YS ziHBfWvHXu6fj}&xYuOJ_))(VN3V8vb?a0IZm~&GIG!Urzh;GHdm6=JPi-DWPw_;XZ z@tKP6HEx&Q{SudF?#j%hH#XVYtGpGPo&b3OT$EM3+|OM<$oLW$zJE(8t8h;uu-XyE z(4ouucdQT?vXl&NzkSzvb{ufM3oC@~|8>5*UNN|XJ`h>-1qHk#n}dF;FuN8(68u* zi`+P9of5tM4uKKYfP_aw-Ug-sPXr=U;<(|9?MS-E+XyD@Y;|4V*y=>yQTI;Z;y9h% z;k~W%&XaQjUAUA#R==A%N}Nm}nA#-z6 zti_uX)3Ce{SYU3WE2;P8xW?IoTI*XckOwO`D)*KH8o9T)Lj*e11`#AfhAljQ?y zx@@e3F{j2N8{S60Vskqc@Q{5!O3CZo;`Y$$wa;>-Fy9IrloOj9lmJnypa5QXj>ISP z8TFTZUIoK-2xNrVo;r<^(MRtUpmTLaR8vHAL%*O&K6Y01Q=jSZ%Fl!cMiG_yBFdd6 z6w4SZm6p`abUxByQju#1I(JfLv&ce#&)^q1(>2HHzX!?+NxzJ{A0gKZDHKyXmYr ztA!!gqfK9Z_&eDb|NM!Y`El5lgx&x$8wD)v!o{xNxk*&Tc*kPlPGY0@9hHZ95K4*H{$iI0+2gy|84{5)(_9-PH&TMgDf|+9Fx{J_Ux{YHbKFcTaMuF zlXn!r2MSNte^@a3C9rpLkVmwXz9XP+HdtyA7yAt&Rx<-mQ_|LJVVLZd{DfZT5amtlXBE z(#Cil>DQK)v%iyqf{9gcgXumW!U$)?j*i{bto872Zr}bIS(LcpFnS$*!{{yey6A4! zeUsW!-Cv1MbYb9qiB>bt#3J@wfjLoZ4Q+&mKfOb>|H9dF^zG6mL;xwn+hnAR(L2 zgti)55Bv#aJZZiNll*Hf$Vu9GhDo0_p~0j+<5arjTD9s{L--&I;x&)OikNrjlLmWb{UWl{!?NJIf7K-<6 zR6V=&Np?6%Y<1$|MsV3z2AfXQk#vR&Q4wlb`8 zA&-TS)W5x+O{oP_(#|P#$D)JOz=R3qXNsbWOG^tq`COg1#~pBm5E)-Tze22`^t#0# zKM1wv9e2-ALk1QWqum1yCw%|^q?6q! 
z7p*J$_cFCbe&+_%yk34$1lISl6OToppDz^%CN)t5I6dBsk~W~hXRV<%H-D$82(H|z zeGdU^Qw~60qC5K9*dv~>4NM+QwUvdreB)sc4*zSsf2B?iz=T29IJ2csXou@SL!+K$&2 zdO_r$L-bnB=C>%sSh0{HkyY*o%47GjPTJb42gO&vq#Y%0c))i0T3S^`pQ=9Ssys1% z*NWPP39}#FC!Y;CRyLjwoP(ij#11`vjKxcO9KHw1@UATZx8QSnh4}z%$7B|@#eZxc zSwdOqbh}~|$61x2@3D@Tdy-tQJT|VTrGn3vobRaVqS@>o`fn9O!UwEFKb#wcK7I7& zJAS3uUs{1nz1$v86xTTK_tz*&IF!L-9cIfW#e|Yqwh^D9D|K4|{Xo)eZ$4}Gi~RQ{ z0IYqv)-dmqiU;6rIMU{PN#*>B_mf))@VyJhCzdQ2ORpn_4vE5DweOr)d4Ez$}|zVbk8u5b;Kx!vcv#w28EBTYU3rx=ABy zTotoz8r@!|LQ9@!1((uW(D zU8XoSr5G^tr{s(&4afkqk6WH&%-Bb}256l$9#RWStPudls~ym3fHu=)UNhLVKUw;1 zLgG50VS66&9n305*tEPS4h%0$Bgk`01Ulzo@+62xti18z(Mn*l!=kctcXFOc zFsy84h5N@_MBmUbY~h8@br8K>dm$@r45RJbTfE>dUTno~mUy<`go9VI_h&~E*1_La z$bO#`!bA%@Km6`lg28wdpHF8e!;_!^XKEF{-#_3NhWy(j2Nrxg*Km`);eKd8W$>oz z(NNldNo`-C_HtI*;-4i3G9>3wm#EPnsHrG0?Au{QFLy+Hs1MDRR{w@0nrJUz zJqG}{8{i%u7QURE^1_3X9gk?n;dF`|Jws1i!2K>Z11SQ;G#wUjVJ5_8wW-<_@M5)> zy_e{D!@n0gdA|MhK4h6nDKhe|Vr60}HtCpkT0)|vxsLTeF^E;XGd_+`_boeg8eDOA z9qxsAMpbo9Z|QBz+3BUYu>zNk#YnSB@t-q!V-tn7-qzX_~ciPGuoa`5TU;A`q3*8`AW&sg&&0 zDA|7uXq-qcfUkemUGm|C3!Pk!E0{lD!e*|B&?-PEkH?)Hfw5AhmQzgG(&k zAteohu=KJkA>G{#O9@CyiFAX~(o0B*fPf&~4N{WN{e5QMd1s#AU$8qnJNI>8*ZG`t z;!r6XN`bo*vXJ1Vj?xeuRfF_DXr7)B*E=7dwpN5SRO}b8NvNo3S6tCCUH|Yzmr`%m zvhFj~I2$`5+bMNNx~uPNks4N;bvrxWnzuHEtdi79MyGV7K)iY37GDp%!UFu+BXOH4 zqurBBSg5#`eP~qi#9&U@MGVwhRqXL|L##yqCzfA%m(EjUmKyGsb)EPWt`u2(q7pmV z3{y!f=U1V_5lh!}ew!fdT$a|Fb4~tC7DmzBl@Z#-X z9J}fUBQ65D_|pd-uXhd)mE`5oW&UlDDJm=XE-v1cPv~F_Vq;@x3fqH-7-izWd;!#( z1M)+j|FM<Z9~k_dtsKYaUZy9dbVoVP(oY65RipPmcc!`kWP*4y2$j*8&BLYRJu_57PjmVyJ&7OmBR(q;z z%NqOP{P;PTIKlhPGGuEy#+_=I^>LU+KEo2UjZ_IZiewEY{~l8P|Sr+@79pN1x83 zEL<)Itfoa~R&~uN&PLo^Zj%1aec494qpXT@-wDvT#2bWC21CVt`hJdMg0SKa7Xi?X zu!z022c9Ac=4k7Khe?j&Q8ii#5+S8wR@(`uKm}FS9*!ifN(gmq_8&_yl9Bf~H*`N5RCxX;^3^@ z1Ehz;nI+cf9KL26>2HA3i7xK1hOnGC6853Wo|npbnCq%67#W8(`bGp@gu$=I*Gr4OM z%H50u=JV|+D-)Gl_NYd67{h&klhw`rV0fX@1`Cg*H2+G}3yS9NSS5+dxDTwGcK)ct zF=s~U-jM*An0N{Z?a$UeO@z8;W^h^Gn7|A|o|O2~JnvSeWoWW<$@`2etNmmY*RcO% 
zR$NSGZG0rb2Z(nK^UIf;tGjFjhTh^W{H?x2Ju{jNmwKD9Vq#typHWbw(0cEI2dNW0M`ZEkN$8SRp!m)+Lc7f6xATLU@U~gjZ zKt6TunH3T9T58ap;sm82<;?YWF=#3ZEx&0HdvEnLzBaBD_%tMlJFqA8+Nvv1?UXBM zYBI#d0ViNe)lgVH_{&#vOIahFN=l>Egc(G=cR0PwGAM?T>(BYPTtM?5O4$Safr;Dg zwe!iD>%*MG7N4_cytxn0bZwIGB>FsVSV|Dre}T>ZVaB!US@Gw*ulrbDL3i}uL83iI9;)e>L5<( zmXtMpI`|3I<5b%Zj_$R*9(j+ei=uDsIm`NzJIcR*_IBC|)$@__*g85Y*%O#J*}*Kp zWsZsn%=;%M^m}&Px+edg2~%9@{96&(vwHz7qJF=A=^NJZad_?=?J9D#t2TcXaleAh zhy|It(h_=p*g2$_6*-dEb|#kelC1UCIB97urr4ZyW-&>NlVE9vx`1r9;sBKriT6Ot z1d^+#0u2@eeq8drL}<5WF=^;zLHb*W2UL0DA+%; z@>u>BHg1C2Y18|z=#mMmLyK=eN^eZ>|Aj|BB?kOzE{j2pR2LcUu7c9^k-EbgT%3to zn_6(9_;mp+=&0hm3Iaa%zkVpJI6Ul9i#R2xFUfHd(%fFSAiY1Zu$~R@xYarm_lLQf zJjp`-*3i29GjQU|UQ|~f9G!|F87e>lY^Izg>cRJsW|H1+bMnbhQXi=ozT&=Ad$irw zy^7cVek||-ItSxcqTA+!e#7X*)mdjP=JjT(w0iq*DJe+WR^OKFoXDSa7?psl{0H~v zV)ke1MCL>x!#p3UC-ijO`PCE6f05z>0ChnoU61}k<_b@73W#-7sUY)`Yt6l3mJ>MR zGiz6BFtzbVf2(oGb8w?8#al6YrOFes|55~}_!FR!vtSQkRld`bAU=A_ZmZdyIUZj9 z_h$6K$3s&|+@a>9Em2f$#^;z2*H6h>60)J;=4z6fM{6%4!-jYQzC|Rkz&_IU?Sm|M zKAq3i4^70Z{kX79(^a<4w%4o`@lb#DQj;k|!e>;kTF zG79fF-TM;>JI2P~D>@K?OQt07MnMd;ah6rKC6xTekxC6Ar&3>kj4ZhNcdrE$v(e!OI;3z>J*jJvDOqVa?oaGfV(bt2!RNTs30QgKz+Bb?ab)RMg?2bZ*= zc!WyP;>5v6c0F;iFUc{dwnOwaYf4BQhPAW|#?}~d-8cRw{z6@z;48NFi zcSNUZu3N6pfi4UU2sZ@ci9L=Pjq*m87Fcg9@@Kt%LzWEq;pTcS?T=rQwWj*t{8$u? 
zQT8Id;5oGI@FLUP4p!K2qTjLZnkCN6T)~^+GSHW+O%%!INF4n3uJDyn7y9x+0FLvw zTXB(2Z&DKHe5LC|JzU?vSiQ{w;gPk0VU!l)F=l*D1GghK>y@a7YsU+r&yGf6DMbr9 zH-AYsEUvK(8z1`nZf9Ng9~|FifYxWPIX~Y21qVs|W0SdtaS$;EU|Ixs5UXa47q0a< zdfiu-Rrtny3?mPCSigeaor$zeK%Yj3%xCV9gw<=wP)W|vA`ZUs`^9mL9!kONSVAn_ ze_{bXn@pi$%qm6xVg+&=!GHS?u+SKl3Jv!);Dg&1c|&Kl@4dY-~aTfGN@m#^C>)I#rmFuhuJTajF2Lai}R3N*T*dQs`aN?@Euk~1o$?#zT@Pd5-5 zxFkp&sIwin0v8WiBiM(^dQ}aGm(Ezk`Musx^^tq=TsCC86zfmNm7M4agm?Yqj3|S` z+Xkpce^Dp-`iaB{)eYh!Y^oSHLod>o=J=N=X>+CP78hv#cEpPb=1f!smDs5u<@iySTAJW_?5Oz;NB9g$G^y8fyE}@>BZ42`q=H}`i zFM*9A^_;{|uU`t7T{FG!t)%#z=KJvWO6He$(-6LCs`rIkncFr)Z$of{Sw=5@^>}y# zcC{%Ax(OK;lC4h({2Uacmp_0I9(pkTPgDaxq6$~AFNFyKCKBpDtNV|JBSnUJ*7M7y zjunp&R9Na3i2NE87*rDp3N7UZ`PsvK>z*5E)5dOw9%M4uWgeHriSq&?LRF|74o{2r zugF8Oz<5Uce)RhAKB~EeBvgOw7Qg6hbWI|+{+o>gCO3)~L&I|KzjA+X_%Ws1-Cu8) zNed7AmczbYRLv`<^)XuKIjAZ0PYSMgJ-h zkIm?aNy2c32lWtmJr>|!8`wri$YU@)OfUi4eJFs^>Wo^R>eZ}m|Jh(w@OY5wA9)0$ zgBp#p9$27YE>-9Y>|WN+ajjz`D0qoIeNOkz;>kdYyOg0ruv)lgL5mFx?N4Cf&fc!P zjUlBTM_ZYd=pvCXfnG&3BKqJMBQO2-UVbs< z>t6rlkTzI~?-_`9;0F7cI06AGvRv3?$gJwqY9u22EW%J}%Gn{V2KrKNh?Hb%ioxfn zF5G*zJ%Is`y!Kf2qSw0pmwazW(ulI<1jLiemjI#t`khKWD>xl%;PZ0OzbEZY$K7Xa z>LfCbG>fLDK7MwNj*dqusaT8?YPz9+o+W6)oDtDX&a&XFZ)mkO^D$S;emERm*VwL< zK>+918uTOs(ld`ivdHsYa|@P9*Om5LTr`|~#GXcP#Gb^4o#_P+Hua~Q0e$sMG~gAc zNH>{2{8PZCaa}9Y$Rv$6=@VVk=?omoSz<0BB&`@Kr;VI$vp#NXiOeACn7=Qt32S%sRe zmX`rv>?>9{7fMJTVK*FUnyIfRnc+ok&FjqPHL+YS%Q{94A~yLwx`{}tW4Ok7?8l6b#dF!7DQ82As7NaE ziVF(1Lls(VRCUPR9U2? 
zu#)mZI$Y|5a1Y$PG&8mU65F?C?iyzgDE@BH#pz$es6xoT{n3v66w77XAls+FW62#N z0+r<*2$jC!Aepc4c_n8NBA$Y~@1@T98T(z(N+4a(F@eL}72($51G=ST(95Bwe3GVn zo6&_EHiK_rzjtKl(Bbh(Ey6V~&q)6Cwi9b%aO{AUZ#Qx7>MU2#Q!2K61b zXSiN0@oc1lR+A!-*DN3Wdqg%xE*xboZv*T#rmD0;&h7R)hRW4^NLviVrpL!OQ`iqjBwH7>8EgxU$?&zu}M#?+xJ~noZp^66{DbtwrG-)R$5rl z4=OM|Z4&iT6fV#jo!kft`y>49peX*Gd>k%bT6BIFcu~q~_24;6m zJN#VH2%3-)eMqjPk;|P_o_KcRX4rcUqX%aT>m0J5 zgKq4Z1>^`Sd&lae`f>kCPd{5NPPCH0fe!!5xZZ`K@8|T@aJ;RcF&(IcrvS!8qn+;W zri_RIz!2tuK*ZJ~G%X!IVQOkxm2rNLwWAo3Hv%3|M)Dv(W3@~PRZ8^|6ZnL(WI*R1 zG7IOc9!h7Qm7)y#)5*#jsrz%j{BQAHL37b%Qz|~WfD|t)KRvHa5_pLyLC}%w(YkLb zG~LNOT<W0FMhy#HnVylkI04g@(s@ z$l67hddn(t9~_c-&jGCNR6icbo~0jbFU*yv;*t~ht@S(_KsA!va?BKobC9N}TxsbY zxafk@uglAePo>QneBM`e={(*#xzSA`c><|U17^~H9HiQHMSc~fFQSB#CVwzZi+6hM z^EM#W-UFpmXZy`Mr`M2*$`?O+JJZfDFAI{&k$<{#OtMc`CZ(sR-xMY|od+IB%kkR$ z*4plyUi#DL6qb~1rjz*b?)BRZP%O-q;kAHU3yYWx8v6hiVZRXvUE~4IK z8Nl;zd|JLc^Q$Q{3|y9WXc*B%`_Hx`J6h6+nlnyotGgxH?x{*?p_DP&zy^QG`7~;h zOR8E(eucaOFMx~2NY|APSmEJ&WFLWpESxBH9_mI4@Pq;)9IY9DdD)q7;Zn6+;AtKP zY*b3!wUO;zT08U$UEd|rDh|lH{`ha#hECS&Db8hQf}yz|-Uu_LrQa(~Q#yLcz)yVR zY-4Too{5V*m0UuQ;qy2DE2r`W7N)W^wuElXKkCqfe0 ziWRh;JTU+wontd}ZPGDp6_Wjs!07et?+pr|yXgj{%sfjzW9^t_t9V8XTWN58-ffxW z97ZaMOR^gVNktD>{oCLKF4Mq8B|uG`K4~At)uQb5R>DFg{q<`ye%?{*+3xX-($6LO zzm#@4d4^W%!~_#m55QX5$`X3`J!?Mb3tGBLHL;Lf6vFbb)4v|W1)h5x7XV7{ zniOGc){q2qWsF7zh68WT$0{bvOxg;6`bCJvky#bMv={GgiGuNZg9z$cirNY2%?>r- zln{G7QsfMm9HQpZ>wSAB%#~Pj3k_!?r7jcs%ShId1ILXebZwl0D25i7yxFVZ{bK)^ zqiqGe*3taAgv=wp88W@VxmWS2ot>qQJPD?n+73iOF?U^m5&8`u4~KXanbe8;b)^j- z?#zeehOg!O;O~k1YatH(OY7X1AqOUnq_?I1t8zccJJw}E$4?Sb^}DGj3K3)vrerba zqLiOjy0OUvPVn)y8I*Lrz-l!gKJBPclm1+DIjzliS*yJF*}uR&2xK9(>GI8xRdJ{a z4ryvKUTn8W{M(h^7bWV|hqxgnLUOd?AUYZu2PBdk=Ligd$^PybQ&5tRO4}oR7)zBz z$Sb`kq#^KbGXY}aD+GEs2a9aNOt;hO(b#l#$q*dCq9!DY&{VvL#XHKb z_e!gh8r@8T4YN9Agg^JDBd9@O^qaD9KKTk>o=ADE;l5o*Iyx^fBhF3&ZhE^dXsjIR zNCtx>G`@c`sBdH@1`)6OTlYbCvwKd-oH!3(*(GB(x-njaPPwe{u*$-KcIhk%S6@Ua zGn>uYKE5xYIu=@GA5DRs2({Tt%;30Pv(nbCI?oE3Kf1}>t%-gT1d4e|gWPqH?pMP3 
zivg||GkqFA^K7psocf_HlVLO+bCYD{1pzU!(VXT76qcls#z$s?{o#hXI_ugoTcday zmLyd#ZWnL8=;$2MfDQ_hmD>0^7~v@X%%F!Xy~f|^jq-9ztW0=Q68g(|*D$&{DjUtw z3xWPmXk_3sQMu3gAP~+wgl{IvuknWNij((HkX?7vK67uhBjWH(hovg(U81nyP+(Qx zXXJ;e(K?|aT!KKM%;~z!pRaH2S$a({r_82T?Mq{a)48dv3UJ9Ve`UYx`~DrRs5&0@ z&JtHoFVJ@1R!&KYBo5yVu$@Q7eUh4Q#tu9$m|n=u{~uWlnFKb@sbGGsos(H`gJ8j#{hJgzU6cB zfbJnG+%WNH_4<*KYAQU4*Q@B7SPt@T$nuU3CiN7VPaO3KNYo|bSWu1fGI37OeygbH zaDn66;o9aS6a(OKbYux4^KoKT6P`;b|ZUfoF+@b}L)Jc%v@`{IIGT$(fRg&bo(daP#BL5ZC>Lfa&z@6*V{fGd?!AZ9eq%pJg6t zg1TyZBd*kROn@&QD+CXl!y;~Nx0H)e7=2wx_s^Q84P=HVQT`Go)?`7tAR9UHelWc; z3g$Yk2aI$bFc*X$)Caf210n6{ro2v}*-7o`kJkj-!FPE1jQ6-W-p43fK8N0hm`Ge= ze4zSRADWq^pkQ{-1-p|fuc&~nUAu~Tz?xSU$JFFp8Hx?GKH9WRqZnLQd&j+>q$2MmbmorG zdG$3^9LnzPPI}A4zM|d@J%CydlFP%xde=JXyd2yp-Bko@d+3r#n;t*B6bWW%CnDkJ z1pzvI$EF5W+{hVl7!r=DrQ}#sR>iMLt!!aJ->iDHHj;44sK3GqwGuHj;vG}r&;Dtb z8g5iJ{4qaw)SRHg2>3JVz5nq3dnjavSIhbsh(FVv)TMid+y_m$nS%ZE*_58=8OuuhCoMYHik6#{z)auA+$|%nLI9XlHC%=_B?;I95)exU3V1 zCqNuxUaF6#jzCCVbp`%$?RMfXmqy9gsF%N++eVn9X zLPLW{IGq%{af$=j2VJDw7T6T`HOw4*=aj&ovz^wp)_y<#DxyaSM<+$1yDCVRzn;ct zw!M>rX(YE)e+?beOtf-?h!%BKQeYn3Vf+6+{oF-H#R}EXj^E9a5RXCeB2*_Ez4kI( zK(f0aA*@){u*%F?FgsL=DGG&YyGM^z9L_FhGQwJ81ja^TC5lUdE>7D}8iZ1Wp7}#Y z8U-zchq<4?kXk zP-=^7+SxkDuNQF(QIHoNmZt73oeSKUnL^5HljN0@o^o>)&i+8-(JNiEe9PLwR#Sios&AT)lnLbREz{zvRM@@XX9i?0ZG8X9PBe zRRT$kZ77qKU<>CCyyBBc$BYWI;s~is1$$k_Y_}qH1wj>tp^;+*XSQW$ws5-LJRCA5 zbrFlJ-_)(|D;=MzOHrJdj;NLbza`MH+dn`v{Jqg?FLiUb zM04J&k|eIqdXR2ni%#$*b&+zNugVa?yRvzK*0^0CtW-QI?F|%TqWi4y-$VJM|ByaBY&sXn@*5ap6AvL zbW`&|eb%h}`Krz`W5kcJGgnKH^X3DY+s;w^hK@iqyJb)aMS6sGzXzWJmRR}oX$4Bb z@TbohJ}1h_iu<&UbWiGnM7$d6nt->kPjsV$-{GH&!Qerz?s+Bp2N{}^t2gq{GJ&u# z@|DA4Tt;H6>sBt7ZcW0*`-ErI@K~dz8(eqYJ`Y~cQMW0@&s{RYWJP4mYih zKU!IU%L%(>jxxa}$x@#A5e@rQ-Q1@7 zIQ(Y6daKDm2mtYlzedgqm*D&{!-_XCcHKY1yh~4uzL#aQ{4*(A=kn+~I`1C!tyTN) z=4|5Nis?&qT5p_cN6npG7PLRHdr``Ze%Jp+Tokod;#418(;rCr%~8gd)M!mNe_ajI zq<8yXb9RHG_EmZ{GQJI*FMIHqZCu@yMGQ$V09#25@|~nN(cd5%ZLaciieKeNt^%=# 
zsUI^Vsv@9^Ls~EIfyh@N;&t%b>n}^ZiU7qVWHG#P(!A~@{u}Vckm675F&gE?T!fzAi#fgel9aVHFwQ=u zCcAQ_mwsZ0YH*bd8-csmphvGt&VR9K2;qn$r=UR84=CKcJ{8)*%e5E$%M6eQ<^DF_ zvemVg^EMPA3|NeG)716a+Xz; z-4W8HHaZ!jJ-pRDuZawyhlFP+$-XI^}MU_R=xl+X*xv#$NI9OFf z;W)19Jil}7GjWNKKUG9$+;-IMwsuQUT5ERZKb@i4m$p#%< z5BuiFuz7|737Fm6+3|{Onx8P#vgElAcv6LQmd9ylPeonSs_l|RR>ZKyri{yg;B*jQ_OoMNWK}gm@;7ga@8bbsar=>F6&=yhqWG>n+svfe^aG`mptZ5D zopb*Nr0!1spg(}Wl$_zKsWqM~sW7q#!3j&QM?(_qnb^f{U|fY zkGLf4Q5;)zd-^Urq#ffSvJ=mmw+mdZG(LY@O`6M)a&i5JGwu#Mh$$jCtar~A7V*tq zWov#vcK{(Z2^>91&!=7@V2GTsHd^2JZ4#psAbC-(xmx%MU6PtYY}mF}dz>mXu1Z^; zQ7g{)g3Pj<;X6d+{qgqT1A%#P0-QEwrjqmBSH#I%W_7c`dqGk>#i9sF9b;^ZnXoy9T>N? zw*;eo)7ZE^v|^s+LtjmA0(q@FD)<|gy7W2PE9tqD85?J}{+^Q%Psv&MtsAiZyt6Aa zTyyZDzp8n>E@wp*t6sDT+H+7s)%jP*{NbAULE}Yl(cYl#{+^t=4P^ywj%(u@`jLUb z$vM;5hQQIG(s4J?JW*eno$YcT^u#TW&)wTq*$D`}SEUwb9;{2Y^8>);X--fYq(j36 z`daE!-q(T-GN(6fQ)$lJ%>@lM^ zPUHpute^j$6KDmNVbfkSR9IY8h^t_`@AB}loV*7H&OjP(HsCE$Q!>(!h(;+*at&E7 z{N#JhZ5M`u48_H&IC(G&@Wt)}r_;=khB>2;!Caw$13XrB-Lb|t@hwnURNzB(LnIlE z@@w*fR3puXfdlFm8*DXip_0|jMFabG7n;oKW-5hYtMW&`yO9k`@=Y7Rs` zWa465AuY+TUn6sSvEsR^za%H#_`RdS87meKySYN{Gi9sTYWGJ!f!rh0u|wrb2bd;fPlmnY_%YxsRJE9Mzlf-V+VaW9-G3(tDqkdT9V)=F7h;`S?!VmxvJ3p>LHe%~Hs6jejFw8Y#|z zgR{DoNdM|#jh60lO=wBXk3s!1A9bF~ASzgIGO!7 z;kr&EjhTd^bv}%qJxA4@W=6%1KTsXZ9RZ~`4z}w=X7)C=N$wGsbLBti*3syj9PHMy zUz7~5ACic?hn@P|(e1`eYoc>5$*g;w{hie5`Y%pi_RCD&XJ%q_|}N9Mzz7NE?pPC zfP4O3KvxqkZ=j*??sI^vCd$*2QeFdTjRulD2gs>CkI0+0j^OA*deZ9q7JGI!BUDcw;=T)LX(;-b&&t62OOL8N-zIK9gM#e`*uVudqZqm1V7S zZ=DbeEkPtvWD`M)#VPg^7Ec=@8JnQdu;xD+2VVJ!V@tfywRf?;3su3e;<7E4gkA5H0ZEMrw@bsDu;zA*fR4=-{x_f|y$zTR2k|~$LtR69PZ>nR zK%lPvLwH6r#@!wHO3#3#_;6329j$hHWI_Vn;zx3lmBI)eZB50fY=luJ1@qn0`k`ei zb&Rn(&hDoK0fcqk7CE^Y*y&jIaHTTdblrPIuo_QwXfb-W=?ewII`+i~M^V@UE|uOG zf(Lvu*=tJ7GwLu0t#JX9fLE_qS)BH$O&|OLg-bhN-gU* z>e-U`%sDde7-RoDvR+aae>&P%A#M-V`@NfZq$SiORM-R<pOn)~UI?OfW?D$7@oUb}~cEt<(eOW4h+L+zBFXRG4!;_>?;w}r)llr)e0l^v^7DC;OJO+a zRr^%48Gq8J2#s*|ukV6KV=@oHK31a1oS!7^RFHxWZ7l3;jJX6pZ81FfAx(e7Tx~@T zK+h+O$ 
zf;zG2>e9v_fXd@Fe(IP11eH>Q7zVsek=dx1+;+ndp!%cq6DS-GII@{@n3~X#curH| zHs*0E*g!fJ#(+nluWl?eTVcq`r3D`F?q_s&ed-!e=gcT0;enQgN7=LgM3Ca`U=b%r zR=A0LSdmz`9YyLwrYagrHzuhwB*VfOX{El6_&ZYR3VSpNxH=@cJG~WsrG5Y8$v@p8 zoA1NlB0BDnoZRs*E#-yW+>4TP#%|o^-Mq*frg)!A=Gw70uDb&`JKUHZceEyyhm2{Q ztymocAz?wI!erz^mpawL8U69+Dd%%<7kN?~R0cA|B{H!7#V{K#U0)N4SfW*+D&V3E zWA>^FJGV72y1tx|GUXd4-IE~e7+1wAULA*7T)s}tdOdf`&J)X(#lz8XQC#2Q;>v9@E_!L8NOBOG$(2 zcWj+k%kxcI^`Id=X=c^M<;FVe%QK76$gLa02h-6@4PTRg^X5D|=_Rt3*s0nqx-9T`t({MM?T|0H@B7ub z<<^m&)V;?}4hO&cEitrh+uz}LFr7a!JxbEqdGHq{%jxf%thxIec)(qc!Nx-t zT%WW86L++UR-y^rUEj9x5Mdjye1oY6P^`vV%tkSI43lW!g22sZ1*_<9~TUc(YJ9i7eMQ3lr?)$yu zKV;@$y^n_gD(AIbLVrP3Y!jQJFKuyl8mx-&YN&)KpO6hKJa;(5Iym`?NE<_gA0EJA{$wE_`LTrWtm&G0pF;a{=T2-tp?OQo zZwd~|h1Hz{$Q+2Dbl=RQ-mik_mn8LylA%J!(68Bfdw3&~fha0|!2kJrebCt}AGZ&#vO3E!91&=XW%<(EQT@iS z=CJ3rgi?7Q`+@MWuc?y}Q@Ccl{vx#r@igo@zjpv2UixKXEUXq|z^$%osE4=sJv*Al zID&xv1+^RTi$1RiiP{eiBU@B`13$ff+IHr1FVqZQQozo!jGkY7sRqlxZQ{$BQdP#u z6S&bhXv++E8~+T9h;lJfA5ErD$Z|6U7vj5Cvn_ zAp)qD@+2saeZ~g89+@0Y>XkmXQ#0QK_kYFAXuLbRuaH0fWRQ_@b8$(tN{Q~%_u?tqEU*U?@L*C?<8daXqcZCq z0vqT$OgBgl3PW7@c)TCBN3I?|WL4FtXT*0tv;LRoOjtQ$;uV%84WgqJ+e3)F zpo(*lzQ3a4><6nikSXvctbc<&J+dYp*bt;>76Apu$FHYh*`g4WXYC;=n0u5N_#(b-Ijsh zpy6#3vurlAyt@CZ@L4G^tYhO?Hzg1_eRjo#obmw;Ttw*Wwc|`&r<5DTxe5W(S7P^5 zx|dn?gp3Un$%@gZJ39J+yL{;trEhJu(*C0_Y}ASLoIfA=hb|8j8NG7yw?FiVpB0!K z=s++g0>b&A0s}Mf#r*Vtg@IW8xU+Rp8-hdgx7MBtq-STlGT+vNWgiWzkLYK*a`wX~ zLiV$7(O~ppI+(HIK{dJ!&stJQuaG_bJu8KXAFas2x=PXk(lQ1`QGcFt5m!TJ(OT`r z>E99AzwPvA_3NAP-@HlEF)>Vt!oKNM2!1{rrDfjSg6T+4-*Q<8QW>{F^mP8c<15L- ziDgsLT35melJ_vDK9Z`CvkIhVs6#CJx!_#AIQn}t?ve=$qbCqKc{`9xWCJp z`GWRFe*W-a!=(3)`{7#q^zce;(x(#b>#Vzud*rU0h+VCK*@)8EjFkS=%_g0YDI1O( zgPx3W&1PYi9is(eWR_qupeUxah&N7sW48p4N>6PMjxpf6%NeV#U@jDDjt(kyfsPKK zSf5Kugw}GHcMK`5WbT0ofF3qrfP=HgdVVh=W9fd!SwiH^F}mPXU^I)uBmP=xFeHi< z4~cjuiLN|J1@QmIlz{c06sQTY1R`zw0;2aYSoe#h$olo;JLK|qMK3z2WIHu@?a18{` z_KclfjCOVvQeVF^-*yWL3lB_}r;?Dag|i`T5K|>@=9--e0Iri^JogcQcem@gBnSR3 
zbw=`VS}J|Q$0|;*rxhxriDQY{iEuZ{Z7W4rn8v<`d0JSu0zxfejBT2iUCcC1^BM55 zDBsuv1q-YMcZ4`U?}+w_aD45Dy8Q_N`FiS2qi15CJhV@~-FWY=$5+TNNV6Gv5SWtF zKOX%PHdt?8dOn5sN;6i)?e*d_el1n46*)0>c010^FHp)XTm~C!)=x3?UCFrw^83-E zL%JUa%}_uuo36ZPoNx7-lzziiuyF00J31Y(n!=bR3wiDd(DDOQ`8@3!&9_q6#YR*`~2=zgM9%<|G8ul^xX8`ml>$+gE+_n z5yUzg0;qoO9q2|y1t(+N+{ck4c1WhZUPvi1QFJ#*je$5x+f@T};Y1{G( z>MQfxBTarw=7*v5C3K(0v01w684_o=DGdSu?Q3ayI38gu(tlShFs$;L#NX`i+2jr2 zf?up73Dhs0`|(UWEe*iv(8zu zvY}5TJNpN|<-?XI*Tv2Nh+?UW-npwDd%-+2$#6DeJ>f#IK6u->BY4L%z+*%6qqOVO z;GhTIP7_gr!CnU!)*%cP1@OtmUAb6SFXJR_*!#=h9&ep;W2^p5@?P{?(B8jq5vQ6r zDJk^W)+m&LOl^K^)P48`PCLR!+*@+aLkJ@mFp`^51g*|KO!kemKSd;%eRBo>4_jXu z)mHpA*&?O52Pod+?k)vd+%-4^cQ5Wvuoegog%&7Iu;L!v-6_ExN^#r#-}CP7Iqz;h z-TN`;+_lE? zI|kPHexrtvEt|I-GWiT0^@JiD=q!%dBQj`C-QS$2a)(97)8<&!n~O>l6Us8Vt87;i z^w&WULCE&m_$-cCudVojwU^&L=foQLR#cgba%F8T4&SmjayF;$H2Y_Ealf_}o#%XI zp|dzx)~yQZsugh*s@Dy#t3Wg(E4D-aU>IsF-i$Eg$H%VJSn!T>V&Ck^lh854IRbmL-^aX|%p}Jyk;Gab^n~S6ku8Ct z6#lQdPH1`q?`u#$kisl9lgj$MhQANU%Whl7Dy^aM1_Ps3T^ADG67{~`E7kW2_dJBq{R6Wj*=B2La zKQncf8XfY+4}K0@zq#zi*;@fO6Un)s1-aS0} zctFse-`RwH@%6=tvqallwzCgr5B(b}IFWXSwMP(A`F4;Y|x+WRIaqXw&1PZHH zsT{MlEweEdDO*ZUve(wp{d4gH&FAr!_CZrK7U#=M5z*{6o3oP>tU;9c_HjatP0Tg4 zGd3SBCZADJKafX)+M8OUD?IL@De&gaDVL5|8+f<)jpvPLod@+t#yL)FQy*I+Rk!9r zVdND?y;HbgQi_E=wOR5Rjeq71 zU3U<+ZU4&J6zM`zaAz%HUuF5dmMx8$P)9j!M>)Oj{1+Q0ZrqHz#CF(Ws9?lO9YJSK!ju#cQF>9+zxcMHUP$$b8-x^r6=rv*56q!CBt#T(1KEUbm~wi% zRehtrM)WS=FAbwq*sf9%oMkXZmR8E+%W;mUJMjS2!?N1NfI!dQ{=F8`x0Ee>YQ`!p zw%Pn$#Eb+zpm%@S1V0_&n|Qr74|(?mH~BvVz*Z;k{(8XnAb#8Z zVYyJiEf)_D&u!rUCxyO~I;^~jx*a8MNMm*Rm5E2jYCDQ^Gs=SN2Q8WQKS&HEH9*iJ zE1RS&`)l}Nv@%K*tGe-(I-LRF(-RUOAcT8t2`D#Ez&Ed54NQIoK8n^`Sy5gVY_j6mElSQmu&s}J&BpsX{6V3L&N<(A;@(XDYqk26avFM>7~TI&CCe9R-x zKya@wS#a%H?basf4>XmNT*SU%;p_~eTcT}!Ds;Ds3b9xJmOpk$Yyuf316q0KmtO;Q z3{Vn1bXk|Zyu;iNc*6d7e+%lGB~sJYK9Bew4*}dCpGTVM+;c{w4q-!Ro0Bde+DwbZb*M(eB9&k^42dJs~N~lkJMMVY729&-o-rTdaJ>4s| z)p@bwocUw7{;urKuI7X>QlIuu$a!y-BzU;-Oe1ehG<5IH$ZZ`73kbDOl9A~7(t2I; zS-RvV1IBv94 
zO7VW+8d{>ZIDQN8$C&h~MH^*k?y)T7O~J&f%r&<>L7G9Y;-cIOaf4Xk^Q}`DJ~68YA(x=Dha&t;Fn>+->KxSv;gU417UtEP>4)oaCxJ{uw@zeG|0&2Trpl)h$l9`sv6 zwIeTiZ+B%$SCJFel$LlKn91IeM4EJCZLsMjY0rFqcIG$ zFHc$ssHj=)##+CBN6K3A3e){*n923eVX^HiQys!vCh~3>lXL~N8NRWa>1nEJUyiA!j|kD(ce@)R{Q&$z)YDYAX#W;4p+g!C zD-!@#NuaXM9z)~o8t%0#X1;WT-{kpt{iJR*cw>H)q3%dV9?w1>>eDNLSKWMXo?Ai* z@X4}(q?3giV>k8sWR);Wuc>UBm*bF-)1&w>S$xSLC`KLbnb3QamsjiR_O&~enTJ$T z0bnsO(j8FGR?$D))t`8!lG>dCttAyy#StL1GIwFcr53~7%v}4FP>V=5&<87Ao z{)~J}jo|a=yN%SY>)f?JXVmc^BmfJY3@Qk!gW2aAz`W|rfJkG9TMMm8hVZu`3UccV z3+!x{D?TkYbgsSt7biB1cd@=f)`-pj=rLPk(!;WIkv^0-Vqz;qPCX_uv9PTJYi}PA zxmeboYg#YlPF`3!vm)lBsTX2*pf4-ttB*r3-Lat{I@f)i zDOXJNqr8-4@X1a?8k_;QFlA(?zqO>PX`iEE@?sKgnBoan$CrLW&;B9Ytrt^cXG1Ac zlcq}IWE6O>Hzm-YSd%mCu`&HHUV*=#Z-iXbZ8hb+Mie3dTEU+O_Gq$^*MsS8S35cG zU6R?|fo$LCYSJS5=(xMmQh#Oxv0|Iwyovf`mca?0Q3v0@%|duc$D}+IVkX!LqWkEk z+-@aMz@;2eADl*GS7J#iT*78>tKS16iN^U?f(rJk#_FUx7?J#GYzodQlk?Gb!@W6O z|2VgWwts9Zhe**8X%%(5S^p4lY4`@x;UC%>m5+v4IN<$~->(;JRZw`yO(oG3o-H`+ zdUmZN7UW8PGM3OYk)7);C&zktSnhhYOKC}*O2c0As#JfQV_dpc9_0EcXvP-Shh8{4 z8&)TT$*g=(dN8P_)IB$G_1T2raF|Dk&AI(+Qe!qMLU3MoCMi6S57H|DGkXa3@x0D0KdUFb8ZsKQlOD!@=N zb}uPyo;Z+Z(_o;IYuXUpN%J8$kgN zIpvI1rGMmU&`!!U6N>JKmabYE^>n+v(E<#oc&O*4g%(zP!cjo))5O??w-Mnappz2P zaf~ZjJPmz+*E5@*jUU|^BpORR^5M^+2(d*!@S}{(N%J3|W?2~vDgQ8$s_OMy?5lxV zD}0~J1QnsjQ>uWMG44-KZr{e#4vG&*QKBMKCsFE$VsrAzVBIyzUqo=|kDt_Ylp10Q zeV!2b0Ew=c``Dm%D3&5p^OVQ>?Df1}GJYf|Y{XGg(_9ykibu#cu9hVAg~i|WXU&Zq z!2=m%|9kq+ulf%ThU8ULq%}1u;BNtP3a>z6{qH}^LGulfip~>ZSAU6C+acpKDw1uG z1pjni*B8Y%EQ=41-2M}Kl5KHWa=$;T-*(6&e0AJRRYiXNNiGG~Q`n8FHMbH782t~6 z3-Daqs9%`0+V7-k#)fV8`1?(fG%E{BSXC9ri{s4Xz1&s!H(H-w~2q|_$ZeN$N zDBx=7T;Zj?pK2kcT5z>hPPD*h+f_FRfuY1U|1#kw^alJ+scS<0kL{0VvX|p~dTEkz zEFDu26&*wDZ}7*9$bAtv1Qae#! 
z8Yh0R)c>^l{xlf9pUF>-tgGu&SRBO|7l&BHmwVagw#}A6PP*v`hx2xB%{KcuN5IdiLe4aLW7w;}6gaRH}1MUwr0z7Zo5i!pqILGS|C)w$sk$%)wt2Ubv z1B%tlcM?ywk~r;ksI8`^k=`4AWE*XlEYHgg06S60`@=}Q$9gl2j%;3ZoA(s>t@G6S z)e}K3%1WS_iuQ*}pv{C#*E(|FQ>vIS%9Fc;5ilb|#8GiXzg<}B!T@B&uEd>fd zIil%lo38jC_7god%0QM(mFL}g@>D2!bE=g#s@&hVsEajD*N)V3WX6j_wr&Xj@h?hC ze(uPIYe$Yfe35e+;OO>P|J?{Ft@eMGN4=UVP_2gy%sbln0j!ucKF!NSY+Cg&40_QD zjqEsVl=v(0^SDx6le+WXk=-C4w^Tus+Vj3*ZWEuSel*UO7EtM)e_!f6E}TzC2H zrCD|L9#hkbefPKlz6nQM2H@9QNf&*e=8>*36+mJlPlh8a@(MFgO7tfYT=xy3 zqh(Y1qF)6CgTY95w=9kX2TM$bc^nC3(QHgi=@W`AsOmQc;I@=DZ2Lzn>iTlVQE7s! z4e_X_9_+8WaYbfTQiJj~n~9(bb-7G^jf4>{c~1gR^xF^?s|*&Duj>_l$D53;q9pzG z;`0np3=|+TsjU|l)`pc{7{*p@SW(P^>!gC3LFQlfkhUAZq<=V1${i0j`lYT9y)Us`W>}px@%$BVd1Juk#}!2ws~1y*#tl2pu&QQ%hagbnO3( zhUpi16CVu_9}^!vDzlkd{)DHzU=S0B#VtwCv&f5b@g72lHNQblUvc1oI*q!SY zVkdm>_w#(d`>HA`7W({BP=9ea*?d|tN=o{(wB=vs4E20rC@C^Oqa12dA(_R#kfV7# zqYJ$VE$&GYEydB!b#4$3SZBb&E6E>YU!rsD9t@NyG4+weQ5Uq9Q1ljo!R}Q z{vF{{%W5eA2!7+-vKHlhud)3001{dm9zc;Zv9DDsDRh3vSSz`_SRc+wnnWg z+vY&@xE%>a^@?jfH1Lxbe?0xM?p4A`E;`=HUs7T|$mHq}_${N$``o~otG6`s8~OVm zIi;-(?DYp0P3K>TXI?vF;Eia{ex+THK=Z|8Izk&sog~=57yfhl1+szbWWLo;KbplK zN-OhVKG!Us@NS{RacO#c@avY#FMk}p zAu%7ldysO7S9MPR&@dKCuWAn;D66wckSj_}3qN90DcMpm7h8NX&Fm1-KK_l;Uz_Ll zbDz^3s$C<*A&HA7W_04_;YH-$KQ=~&i+gc*?%q4oi>*guV~k-h&equ5ZBlMr(FJX1 zcOJ)yKxuLpUk0PG?9_K6^%D?n#9k0NNr|;m%k+FWAedk6Md#V6#{nG8s0Jfi;|Per z&q>v+^o*;7lBs#$yWzpu@pGQTygWSX7^AaUkt*LW(&?yD~gdR&jI)^~K zw#+|?@tzsbL-Se0Z;KgkeBWPbABKJtP{U2A>w=T0=_op+g&ClMH{=z76-qMf4gi}r zr7e$phmPUYsZ9R3tFx&Wg?kD8k5C-Ii1z%n1{>l|P zwr;KSg{ST)*+kEfOeSv?rw7Be&G4ehXogtEl>>i&OC|j5yP1>>DhhQeS%|tCZe=r_o2NU)P~K;~fNxSts4ZmvxF?j;vx-accc1 zN@W5VWT%~6EXzaeDP>^8Dqrr1YRfUpW+;LB^~h?Z;JaI|qwnX=Fg~u~5_={k#515e z;;Rz>cT#aQ#h$L;N*w;Ki~$N97fQinbFQ;7?V}z=7&1W z@tV1Fl@teKYu*wl{h=?^CB@t#_Uh#r7p z^eJVRM-~0YS!=13T1T~ZMk+zMtH~!G%eBE)E~>{FqiLPGn*r909D6sL?#|Wj#7gRH z4n!lfGl@3*!uceDbZio>sK0s1&&4{aI?D~;ei8o|afV)qJYgdOwg}_@jA~3L_6MmW z&qg@l9_7b8WzkI756?{Rm=3LU-T-;rH(m}*2A@r`*EM^D9PA1}OXfIV;8pP6f~ 
zzfNzw4R&0hfj$Qew!kFAKh^VzsUuAWrh zq3BkD#Mz$zWyT{WohtYXJyaqy>b`eCtxVk5DEl`7X{R4Om4Hd|`F1GMfPw-#Jan5| zjcRS{w9*Ik9?AFbIZkcV#nEq!HNC#L(>)lLeW)EmEK6 zKj696VlFKL1B(kf2zOx#AHCs^Ik7k1n-JjHi5c!`wBI?`XzswKiS#;XH5$n2{di2z zm8NRMSQ!OXoH&lF&i!C=y)mt>l+5zF76s-9WGirpX(z|W$@?VaN61XX(*JZ~bWxwi zT+R=C6#tWU6&ZP4FVgD=%pu0HK^<^!re5pas6HcSoA0$a@PqKD1UfbN!Q4%Y0*-UE z4eSqCAjw7|iTq}4M`WUewrA4GDE&C65r-kr2pFO_xUc1bkg>|aF@&=q$iUv_;4sY< zSR4-F*r4I2B!d?j4TN^!l(7SglX2YN3k@cI3^sg>3K1vs%=-59grlmmu1QrD zF483QXP3hqidj`PuBfV-D8l7fGOc!G4y{XJAc5wz0y$<)15US)d^}|y**D(Khg@8} zI#pVG3ttKsGWk_My&)Rg`Fg9BPNH=LlX-^)^8NNrtXh2*Okv_IhFUR6-v90c+p`I$ zR$d&nCOkUj)-aeM2&+B#MmBKa9b)F+p2d2@^E!fGf+{L12JCS7XxY$|D4oL?fwAerMIcgp~_YWoWP2}3zFZ2Zt|BB;{tC!+!&4Vrh?t(MOc0NW3%_~}OKRaE z0Q@BXQh12(;1$ac%wfY(kOsAXTpi~zSWRA5dqt;C|}TfB==)($0WO{I;_^-VRic0yqL$fZr^@M5BU*jc=41jn(| zHhevTvxe3&k(icu8BeH4_&J6BB7a&ME0RF~<^r+^U=-*z9JEtL9#8z$Z7S^_Jr7c; zE*rHsg`khc@l*tvRIuu+cYbFkuFFU}$cD*+5{QUP-FEq&UT#osnLNH&(Y(V%ONlHx4nBvdFA5m=|)k){ZccZPvlI zLQk*~Ignj)XI5vEasfM{em3N=w;m~29ZX30%R<8*>KgbXV3vDa4mkzs9T(up4)iVG zx*ye!WjQ-mpxHDxLCM_7D2hY2D9w`P@VR)7)LO&v7f$b}J@4$NnYS}7@k2{7=&0OaXWF!(HiRasc|zlKAOL77GkpA3;101iDrry+{S5a?)b^y6sdU4I3E%Pfee`Yd9+Vk?STEL345=R>#UE<|=V+*wOKHMI zcdUHU+muiBa}L@SG1D}%78{thL`baT7fbL@{_<8h1-<#r5u&nXRA2dF+#w-AnephvEKU?YZRUsYO%vrp^aVU%y zLyfh_YXjYluP2TMO&!oY><2unC`=-^hALT}>Mxtc6;*BM*&yG6;V@Mj??3M5f*d5? 
zv6})p6OMg5|2Tb27C+X^ID-Nb@wS4$?T6HfcteX9Czbl5jNXg=5$iZ*Z1Q{-6gXLj zecy>r?pm}Tf8COp6`7!8;n%=EGDwxCPxl;jU3EAHOM7abYL*~tX5$IHs2gK_L9{F| zkj}|~(D~@!>V6cGVRo=T$Ul(#$Y$ItiQN10^UNHYii7vU_Ef(pN3XA?zcx!}J6J}& zHE^e??-I8$F(O7Sa$pXPXwVOJE$(AU4YFP6g}a=>ClvGhp!??Ml2RaMcI>NkWqG#f z`-`(851f%_g11US6h?RZ@+J>w5D`hTsCyTI`hv?fgQd!)xbNq8ab7)h6o0{tubO2LGDLk3VOW zV@fpg6xFvYDo{G^72Dqr6V26HL#xAir4pSpYZhm8!{9E=zlsR;rQz~&oTza-wH;r^ z_m7AjI6&BV!N?sOT~!?>P%5m}MTMo{Y~82hG((``JmWm%fePRs6PT6^lw>!alxQKE z-7tgJEeoQ5P5S|q263lO8d51Gx0k!?D7~UO{KJu`;z6PWtkA2-mgMdT!XS0vuwZ1z z_6z~859FCm0f-NOgt&!dK(4^y$e%htGKL|Yygjdj4GaJ{T@2lx_oMtKHS*qM+V=fLXhYNHT%q<5{G}G)-srewJO8ubO#rfGCCI-AnUd)3 zyo~9A7*5(j*%gju?#wMY>6iDPikq(I?ju<38shl%pokCXwUF?#ojU;{%^BU>o00pe zQWpvdG6?Ta#6g_?k7(|1{p=d@K7{w#Xa?+FH6Z%0-oiJ>K2ojW>|C_l=83n2x)V#b zRv)0*<{U5u`N=(w9ZFqbf6HH^y@JV@Fo^G3)p`OPN{fr?MPP_?xw?I=GK?;WM!cnW zO?08jgEeoVdV1Eo3Niml@|{*1xy`y>DMHxpexFrq7Aa131_w8~Y^b`r z))b5*c!&Q%D98-n0M6Fi8Tjd?io#X3Ip70Xo(ILmbdV6Bd~u2{$0p;366DGM-#G_v zT3=I6kf&)q`q0I|JFa=Ni*Urjo^oVZKj$`Qb{l2--#7>rTvRTk-yA2|7C1EEU45JL zEZZE4K#2L<`1^D+l2yTi9D7QFZn_GCIUI?G#oy;!<2Ei1w)}ZgCC*JAPXn^^x>+gN zRKGSvNe0k!Q(^-!hqC(AcGT;i-hzuRg``GDJZVNQZcKeIp23PT?B;HNk8Xap`F;2o zH>+pYMl>A)t*qVEb?bQ!>jJO;>g{aPDO7z4PT{27u zpQ|78ImzvaK(s5Y6e15*O4S?icUT0!2c*5xu2_#@(K*zrHlnX6PqmVsac8qNOytiU zfwtSw<6!-w)y{niWOcVx$37kq81QZ(mbz@g=h=u#UYbWyb5Z+~3b|ErjGBljzEyKp zm`rVyu0+q6#*m~vQy6n2T}e*)VgL8xm%;Hn-DqL~os8K+p&##dY~;h+fI7pfV*$mf z{DK7rCLV`?QPs2@0sy&I-(bYF>@H$#D>i zHT^4D9sk|YD(p)6?~J-)ypmpBk(*Q&=^n`zJcEu+fLTQ_c5Bl)&hx%x zVl}VAANET0ev5e?WPlqh-6yA~;{~Lh*xC1&xpBk&h^>JyJy`pB>Fc__q;u7LgwZFKqpj^LJ#kERy2cJ3l^;KP`NIe@ju84D@5fDf!LHEO#M{jzIax0YTu57tC>+`AuJ6LaFiAnCrWg+pd|u zTcuojmU9Y?Zq2<|jFykT&h(8=UbbhaDjip9QzC3c4kLM~eW7WuUJ!d}q1=6f};iGrUikYO94E1gF( zjOadI-)i&@(9qoaJ2mDzq&Ip&r7nr^vm>G-7fa0-bhF8gS9R+s!Lz^b%B<^z7zv1u zX4GvW8ziO8>7jA?4K^`fut|fJK0m3s3bB6DWiBFSd(LWJt9UpUX9IkDq60po0Us&3 z#%~Ax-97}fI{$Q8@7z0i@RoM*z0zH^ozYCw)cl5!uo>uuzebcK`LIv{fA$KAtmZfE zbhc3StQg|AwNnnQh-%`ubuj0`%-b|& 
z^hbdE)-jrgS2W*1PxA5CA83cwkoz~+S1~T%&u^rWO46E=!28a#GCDTTdUDMKQNh$v zeqcLXB+8-(-4?eZ!$k!tt%Hs<*Tsc(g;DPhAJM=%lA>rx$3^9fX>aA!-dnX;Gji@I zLdK29baLjd%B9v_y7a+KTqn(#We9H;p6&dvgmKgLz%A3?H8yFaA#d zMC;c)rD|+y9&r&`{=1h9HYjF^IaBm$T-|^}uHz^-N-^;s0C3_&f=JR3$)9MZ2`or1)fAk$`0 zqvsJD{U5+ExOc$gZQj55o8f?e0^+%X)RvWSwYF4MGX!aPjtvM#i$@4LSVsj=WxYzh zFi+yn7ioCw1YP@s{NEJ>x(HHoUv|)(Hu=gzXAX5IEhn0N#}srYVaybsdP7ajrm9Nv zaG`ql5!ETHO*fsArkAo5<#l(+Rj29hHnAQ>?wXtTm+mzf znGFfrF{M*8n{od@lKbXG89&(bs zP~&nYkW-b%xWCU)C^)gX++X^snj-eBp2LW2HKgvz!CbNo_1VJ@zZsMKCueb`%)<}{Ijglabm!>?+wfRu`g)G<(2*AyW$!^yJ9KcodBR$ zCU-Y{RSMMk1yVVLl*g!Cxhq2}2HU>u0B1Xhj#^SJyf+RXx$9}8qh_~$hw=+c6u!PD zT=~PlhS907dpkjtHUIi$sPZCrmg4L}<(y*S?ZyQJMb+_h*G~ur;6a6WMAo2 zel5aWc?O;cAd*E|)N&JxOuPe+QA_ODA=hfXK=04KTY0cw{P}-A$w3M)()7;PgqVWXps8YIw)yy>B15ibkVEmT?Z~XG6X`|^cdv+! zmD!&AzhhuL2c^4(Q(7)Z?x^j?a0Ud@UvFn6J8{}fCc?zo8~NsDL~euXZhwlNL_oKB ze@>WFfYM{?hZe^@aIZM{#^~QEC{zUv^Im_iz~wg*uwm?TxpgtHfyYW6!CV~ZB`$@R z@ichb^?P#;H&wwh=j$ly?o(~7tEWc~b>f%>J`;b;v)4-G&Q+YQ_6-sd98^jvYi2U- zk)ER)cXlqMftDh;!G2)Off1ZnOMSFlT?F^a0Z^=vi&XEd9zs<)!eb#%C@|*Sd)+*? 
z)cKtljik|hW7YaNdnQK)Tp`qKj-nT27;_n`!ctj35@{@Cx|o2_Xl)RngGmbNPJE{Q zeenuTWyBeCz?KO!Ce3hVdTfmCetT3&JNtuKDT*mm1(6 zo33EvnWmae`_NPCpda!hW2^`53!+(NJQ@d}L|PtQ@Arg_1%_R9-41m4c|(!<^MiL8 zAjv}IpR)H^(})4(UQb+k%u{Mt#3RKvuY3K>9)H^%aOrm4Qdakk@$<8%Ex2r}2u7yP z(tXqVCa>rxHoPSq}HqRh%PiagYXN@j=*rZysrzDlj(D<~?A!rdEgLAp*=g4Ho zjNXQiE0UBiK}+}E03ooKOI4utSD8GsU^jRYiYm$wtw7yy5; zM6FA53hURZ+Tv;y*m{KMu)%;cMo*B(?QrVk22C{sHtC43b7Ny!K7*4HN4cp4>qm%_ zyKmr=xv(iNxM8RMWH89S6-3=mv^sAFP3=g8c0iz=q{Fi--8@f?x#R_3PC-nePz2^+ z(^8iczGDJX>})GlluKSvg&N&p>=vt$3if#=UEk2HnzFG{!f4lrHIC2dXVhBpsE&&$ z>ix`)x9pOte^62t|u;^IqacQ>|u9|3r z@^@QYqQKRhagT#z{Oi-ju0yNK3;{bV1&BP@%ZrYz{ zHVfccawy3x9sb+i)&E6+Y(xf}`rn?@GOFcS_Q#X)*-fJmt_258Db0UzS*oSK+?&`# z^d`~K(d|P7{)ew3=4FjGSgYOlGL9rxjW@UN{=WB~Xj;wl)GQ5Tpjey{L|5d2Cpn27 zNu?NTCmir7Um&m1YtsN1W>R!6rB{8M z@?WxvO&hAtZ(s{}NA#-tkhFfX6E}Uj7sO1-G+$Eq{B2wh6}<5Q0r`kaYy#BDSWQc8 zl8eiAa>|fl3e*Vy4n;2kj?L>n{j@o|M`=eE1uUyd3!U=YHMudJ$6fKE>W$=_YZgp4 z$iXkuBYWb`y`5Gpr-y@lg|XYhY4s7l83k%sOJ9LJ0_7t-z`x1z?~=1*Wqs)KuTSiVK?Qbz~`X(lDU8p$&kRQ>p(7P8lNT@k+!vfkayggtihxCue zxiPqyTVFh}HVZeVB!exUvWkw^+9N4f1Z#qN-Ek-vLmvv*`yBHJ8WIR&O@wp)2#dQ? zOhtGEmcjX2gbToE1g+LYQ{_MZ`LX_38&DBf3>;&2e6(G@)*hS+i`$QKxf?ymekyfG@wgMs+`D zZZwjTx9I%o`r}7IHI*8Od{k+b@)TA}hhYY}N87SfdD z67i66?~`N~F|pY1lTe&FcTI+9GDL6@Q^r_^yX=ykwNVU5ztz5#=2zNXolK2dbaI1# zW^xP9kIPk<4s0vXLIqI>za1>9ic(m>hKQq8kHFQDQ7YJJz-z=kh*~tK8E%9%YMhqG zK)i2mB=vgBc}dYCw~~&smbl2P5viVUSdDz^ec8Ml=2jsKu~(7fuJ(krRJ&udZ5BK3 zNiFfT)NUak!Az~QoA^|9qz3IW8Oe_{4UKoCdnHwFChy4he#EM4Z6Rga{mLfYD`-%l zZOKY6D)F^y$y5Xy>7LBInkmELtsHXQGLUW{bJheGFz~0z0%xtYtM0KzjMJY1a|afh zk+1vb$ec~Gr-YAqIZS1WxbMih3O|ZR0jfs23LD7O%K$QEvsT(#QzibCQP*YixJO|? zFGQQd4`y9TW!sJZ4BfE=9m=g!}TMlqkMt8DD*!mN{1~rsbDrIUiE? 
z$Wwr-6g_JxoAgcjH*h0CNs)y?j^5r52Ih%YXnWSK^_G<`O$XyY&P0< z9~)W(_q-7)hSd#1xy!{yy{dcOTZj_`sYsk%Acj3^9Or5G$zO?8jv>A|#M@=U_8V}c z)Jd>?P0lS<(nRc)=6^B1n&1I9xV0)oMtp3r3Z6?Y+(n0XFduG@Zm=DVC8JS!qqx4v z%iVjsNZ+jILbKWuRWFl3b^Ex*)$4sOqSmgbe=Hr7#(VE(d5ly!2K@ItPnY-@KcwH{ zq;xg)1Vf`P)TE#&Y55X|-%mDP*91Mb7KC+B*UD#)4{D2$9e|kV2kMwux#nGt$l0DH zP#Zjn9mYXVDr`q%Cz9w(rYuJZ=W+R?I8qc!*n9_&s+U8H-Xu+bhdKhh^A zx^ziQZyFn?UAmAO^mNZ-u}q{}ZPcAu*;#8dfY^_564V>drxReviB39oy}WiplKF7) z7gG~4Povh_i#mkC{ALwH3WH0=daaASt}Q$zHGqB^0j?#NeZSq8-w5U!=KW{i4;FlfIY z3X>VzrhUjh=Zx0TX2tGl8hM7L@MJUH}Un{EX zm@#g=O!EP?Vw+D$+}|6z(S^uCGlZ=aGuVYO%(Beh%^A6``R1}#@9)*LJ;_lh^mkGv zY1|Xx2?3F)5T>wQ7jXRMia4-OOij&j^;33gH0jONRES%0KBajm$ERq&I5gn{y!mbW z;5UNN9ucTQr3SFC-2?zp7PAVv|y$jp>uoXOrl*IA;{20SXlALYvfYRouQT##`(`u5fjLUrV%xfAoLOXZH znkK6xlG>+WYGz#DA`e0YSjX|21S%j84D00F0v{Kf2ijW~nteZHW&FbHkqJP@aYYbA zj6Hf!i`=g)WJuvJnaLBP3JMBv|GqqW1XI}&=;J z&{K*0|N1=tiy`TI8BgvkGU-(PK)b16-C`$Bs@o7vObQ^_ayL-eW6%u&w!}u(N?R}^ z=rqag(%BR_vEY08qTi|nD+)mc-SGOiKS>b~9MRK4k3STJ!w}zgdbZ~fWvM4_nI5?3x|QX*$kPYA1A|J?0gLK% za~$uC(hFJ_P%AzxKVu(6gV6*N${G9XT#l4wrgZJB+kDE0buOWa&Vn+0jP#bN>|03(G|DtNc zZm%LBpma;BfWXkwB`DoJGz`)?bcb{!-K8`P3?SVwAdPg2%+M*_aQ5^5bl!9RgZ-J< z_jO-ut?$yPiqI9!wA~vZYIT1{0O@#4>}Ue*2KC;k3D6pZT@KoZEYPBcey@PiKO{s3 zFV&eWn)JuhNL4iOVUsI(Kr^D;m~{$pNm!-}y~}D{Vc;0SDi(NykJ4s68=|lHI(@a_ zeU`VD>FPgeyVt7muM8_{DlAFjURI4Fjz^u+=LyrWMxoS>%+t^2BGpryVjM%3nlL=$ z>_Z0T!| z=1^YJuGEQ|U;ukmR)*^PECPnw47H{A90#!q6H85R0PTW@QHOdn#eS@xbk?qJcVy8* z2@Gy09nSh-KA!)_Ku-{@y7x6!hik-(5{PLL>1Qp? 
zG;oyn3@2M#Iq`+$ltTP6w4JKX0@BGR>wjb z>=mt;ri)Xss`SaH%G#OvRz|j$-t>^lhPoGX!qV4$i~jTs^0E)po9Y&s&R}wl_nm?y zL+d}}?tNcw>xN~ZY~f#Q{AS0jMyl)d;nv<}d`eZlsn+KUf)&iSWc6$F{oMxlepq8A z4W;!{<#`#O7vhl2a~s08EIM-%u+_I{1*iD?ThQk0U7{?RUrjz3 z=$gQ%I|C6n4V6u6%}dkr7>5CpKP`_HFu;wzGV+0!y>%-QBFKP zh1tJ}71e^#25oF4< zWD}3_CmXK63#Cd8%HG7j=Itss^9cjuMC;5%={$}!3ObdL`(n2JMW4X81Ip0j@+JbG z>ow#?#g9of1=W4}VnkB~c}ug;+FOTdE`J(qQt^1KbtyZ#L1@qq!=#^)pNKjA+34Y} z#CzF&HlAz|npn}O4@G@f3B((0>k5z`9OFH}mt$C z(NGng3etDwpUVVQYo>M#iHD{|pmf-S<2Z585k%g>rO z3Ho@e>0myXE~6Z>*$U!4_scKvT$v2AHGu2`Coa~%u2ud0Mpsgw$Ws~<71;zf2}y2- zpU->z{gLzSKU(K;naG5xa(LhGv)R6mE(j0Dh!GXBL-giQi~YHHjxG@8CWybkMc1$+ z`8grl8DluJmykUCMwnn9mB0MoK=NhyrVkhUg@SD5M`*;AZ%7k9E6@XB^lZJ9PDg9% zYUKEyg=9ZI0H@hyt5`9Ed%D^@1s*)lzXNFy z?e_wYWC`1YUaTi4>$li)+{LCt42MVG)`deGN4!JmzkP>#XA&l{4y8obS9>PV;_u)Y zrd0fE7%p*ifh&o4J9EEdQtDZRPMHeHi&-1uy}vcj03`=hY<{con}cBo3rhuYIdHGP z+(DsWpqTDV6pro{y~ijAr0e=zGnTjnpA)YqLUj6>;k_gmz}*%)xyl(r8_ zIDT~c4Qz$uT_bmg=G^x3ZFS!3lRB?IadCMM#;4>9fty|5EIInVz-AOX!Dt=7<+7QQ zYGD(V+X@|k6Z8_i=BWASA%i=QVRA$i*BXuo8;hGa5*sMuVQO+de(A2m{&#CbN>P!D z-*FXncptvp;t~J)^3dhznCMX4tLNYrVz93v>5M&SZ7rf8+cmtDl+(&rXVvXG3j=q} zzN!G%c`~+c#dY~za~A60GWs99aB!eenJ#~NrDUrxu@CF3qDI6j5k=mbza(G{XEE9D zXySLsVnX_y=s-lroy8Z)JeE+POuoRuPm4AFzXejq@L3P-9o*SS@na5 z7>0ir`$CKPS;wnI_i74lc^N-2|7w!<0?)(V?fJ}@7_JJ99ID27RYlbn*;?b`!TPV^ z=gmSetT;(!v#h_I4HR)GHfW|*W$K8z8V)N4X(<36z=~llrR>+1X4tXPOjhnk!1>rdS=QBl;AfKu z43?VXsW~^&wR}*TOv_<~Tf0y=pyiO0e1}_A*e3@|OJ7@6EDlXhmOhV-!C}^o@^hr0 zHYhDqdkT+htLJ9iT(nl9VJ`N z5rFbrH}@kxL$Si}n^Awt4#!qjl>eq1d154p%gjeTvi+w!Uy;2ZPM8~LBqp~gW48VM zR4bXK_EGH!P_Mh=H&kn*?&7ciJ8z?Rm$Tf@D6C;fu3CDtzRW@Spq4b5B|EsS8C1i` zqXp#4^5=V&)2S9>2XlIp@0v&78;S_W?rs#TTs|Ag1P770?83dZMQ2s22O1Ve=efVr zcgmZHRAAkvd2FmVK+Cn^dl}SCxl6&r&F#SD5=&6tsq)E*a`yO9k5Uk%K%w@?B3tGD z>r^DKK%PPL>CU;Q!HRV0xEf5#VADMttr3+IlHE+QrRZA9_6Ii5?CH&f>CRz!a3VL7 zGnscByvWs6&ey(}Z1!=0TajEft4srP!m*pLl5{)hSXuc2{_+-uL(HiSWG{L>FJB&B zSUs=-M@;|HY~nGuly6=@^id$3Cc*{=*>Rp}j;#BL%nL&1vEV#Ub-FTUwl%�nWL` 
z46La0vKrvn!;0c9y;Zu&TI$5{6L0r7h<}dI%YiUsMRodK4_Ow_?vXq)4Pi&0cL7dY zW7o;vwE+4J5UJlrtx(zd9p}9nuzBkA#fuU5e`Cmm9xLvT*01g;-u9N^fI*IV@Lijy zvb_46YNMjE7T`S!b+*A)K?D!p`|vk=YzYC=ih8dtd<~o2I6i)DUQ6W$CUUN!TB&op z(5X?f<;nuS_~CjVM_e_V{MFnxu~n9$c8fW%tZc!9qv$c^OVkJi5v@^V!%N9$qj-y( zv%S!Y_f{s}{H9Xy4tgfQj+8#wXLpzdYCtUESxjOMFu^jpzT-;Sp!P8Emetps~Vigtmo z)jE_vdkPb7ZbanOQ9otBD2GAYI)w{p#yl7O6b?WVm5QQldnB0hU{F5;=%Uesad{?5 z;OGg=3zP6*Q>HYywfvFwX=P_fU!h814tQ)Vmd=MCJ?6Y-UcXYOxNbJTh7l_sh^=(P zFtPv@EejA@jl&h^SxbN=O1J6H>^iU!X2d~Xy}VRab2QJ>23+!r57V{M@T%kxgpi5d zmU2@&*|)eq*yJ*cwo&e94E=-gV9NlcAzFk79wGL-G0{K)JTA5|sW#&$-~4k!qFh*P zNinYk2)Flu**8cGc_oVQ_go;LM1FKi5O{2%?4OhZO(`kRfow@zedgDY<#~V~Gq^98 zX3`pTEB|+2@=K{O0L#m*X)1oIELqDAPn8!^G&ofiuezwQi+|~^y;J%&m@1MK^?S&0 zAIN3KJR3ls61=;OILBxqMD7J99mqk^927717XogY$IE@MPt*z|y(oEYB7VAAhP`^- zw=Af!&|=QZ6n98~_k*mN1T}bI#gWtJ8N=Ub<_8cIBDU~Qvc83Ai2q!?>MFJ{B%o6Q zmM*qEVkWUxx!ven;0Ldv`1roP_sYEdiYw_Z`3D)E*U5HVxu3tNSYmpqNzYfm4?6J9 zHh&nBafjU@{i#R*vC2)W&hG8}f3}iG;cwr*6`1_%@Vh1F{_=u^Rr~HxSoSMQG8Eg? z%&hmnhrBFda{SZwo6Y`zWn_Wf$>U|Xuj99iT?UmrQDAAo3kL1sizh5l)a?dM_$hhI zR?fXO+_i4c6-(hwpgl!iYa|loe#^Wv&`^I?hD~buv|o~2A*)g!7)dfKuBx6PjCmxW zEeRo=gfE9Z*t`cXk*i(*Jb+16DNihb?%qu32yxWsu_H-A>y`Bosk4*k#cbH_t#j=2 zinKxq0ZGEf-wUjn_LzbY-MH&k3#iHVAirP*td=C5T(ob3{LfR@bpS~=EUnu7W(Ua6 zUP{a`&jLB4kMLvxDrUdk9gn=u#N3B(D&s^~$PIM;hOVw4)$bVVqc|282~6>PZ2q^k zwq#e=$LBj5lg6ElcbjI~4;ux`ecCv2k!y0r2W>rZr$*UlFZ$>?L%ROSN=kl(x~$BBuFu~DJz^hEOq7*X z-aM|e{46cwd@%ym24(c#?U?B{dXg%X#6vcIv1jMaM3?fuZE82?zPub;n{5~j;n_lcZ~pl}*(fw~ z2A2swe!;Wo)VJgKeNycIkcW-7|f4PkqCCRwb`CW5a zGC$si^H%W>FZU?&EcL}-Ut6Tnt!&k!axF+GGTVjBPG`;4*Q7_P=D%+I%&IL^rq<>! 
z9d8)37vwd20TU+s>*CA$bPJEb_1ME#dB6YoZ?(V!oaD08-N_Us&xcFZza163V`>z4 zC~dy|W(6UQAO;;;=#jpLS6vhrX$tMwNH3JJPN}}$_#UdLl~a}POzJVFovAGyYHgCN zAUmtWF3ALkHAoj(+i8X{GKN~4t7p#wD;P~t)YlJ0dsb$e3G9TVS*ayia)K!qtrbu~ zZwehqK9n~sbK0ayQHJfq^bet6rE>21ddl9~xi6|0lxN%(pFIp8{R}Le@H%&AZ5R7J$ zPqj329q3a|mE~})R4UaRRyIeIfE*a7f2B7R8mG+`yE<}A&xX+0kCL0DBuQv{g<^Bp zxcImHgy$=`xZE@j53CGH%R7&Q*3 znJ>lcSj8xZ%U+$gp4+Ta*b~K8&?i>AE88ijv3oweThwbOu{xr(IzrMTwVHXg z7~$-dqIcAYNu_qN=C|Q zO^$V5kph-OZ8NZZaE&)-g^o7<9STzMCJ|EUJDsF!#C>Z&pkF4-M!yEj~y7Xwd zrGIn}zT8Io$=8*9uCO;UG7hrEOxD6C2;d9vz6*`rM)OTwO#1FiQ{w3=-;32*%>{64 zFeyWBn!d%5b4S2t4XtMD;Q(}cIPJ0oHk>#3>sN&D_30EmiDV3x+vI)qmvQ~@Fep9? z=bI61`PK0zG`SP_+TXT&u>?8B(3WA7ho9WEV8l$p&=X0ws_CFMm9!jYfY&rF*~&4o zsIW~o>g)|yPuBB-63NTn_9lXTWrM(weuvNA!i2l#DuG#Oxw!6E!b?OuD8-OQoDOB~ z`#O6IQ4=f(!_})v!SMS7DlE;K`Dj})34=>4#@W$4Stbt298r=FxPWi;KA6e?E3s>S zR;wbYG8oa3G5NPHIbdi*U@(dTu zf7hVTE0Rx~7Fe7h(kg|6I^v#R+c1}zzcO=Ffmi3cKi@Hd%kv+Yl~DE9#=$Fe3Hc-?u;M0jF?(MUEr@@Fu(d8XbJ6>np88ue zU++pBpdfKC`3%A}J*m3Nu(QLKm@8{}tOUq4w=nJFaX?MQh@w3`gJ03R9)(>UMrc!D z74|%P@#=(dP7w{EMo+VhAE1Bf820bZ6f`*QnDp@Q^Ty~uU&!&J!>=tw7QdtO*|r*^ zVtW3h`XhJ?TwLw2)WEhkTgCGckahl-Q1fUj`4m;3CgOFY>Vk*E46bxd1K5{V`2skg zv54&q;L_Uy@@kf!Bi5E|8aI&Bv!_G6az?krWdjU0>rp&H$84zqY@U&<2e-} zV9x~C1g4EH!j}pr+2`#|zN_p@Qnc;U+|JjRzSnn8v7*I&NB-LPGihC3#!lE4=5T)fn|lFf%!*6Er9Xb4+Dq;AM2}!aGM_Q@&D-X z8Xg9^-radTGalr&qrW01ck0B@7Jec~K4YRi#gmwnNrAhVC} zm7hKsK%qS|y(Gr8Dpd9y(6tfcY*DXtVD=~lM8D(X5W`N1yA_{hud9%zEPx^4p0k8U z@dQDX!i4x3cz# z#SgK!K4d4`jSM2o!g*J`!B|;y-+q~kMqHZ%R4x093@nyZC(DNovIQ>=$AF3y2)*y2wC`m&fZZFHW2_~^r9KMUO(v2`>i{`9V zl9T?)CbP=OV8y(GKzCMUQ*jvD8j*s!;6!EWs?PVtU^6yb*=Q%THxKd91-ON6RSj@Lw|xrhbnLoar@0%W={h67O;2`Bwa|}6$B-;0{^&gmF`7ZiI&=`mzA!%b` z&3w~4QN>|1Grc$%W6_R7VR)w&xt(Tg0if2DbA7g|fofr7XqrcOy}~fBR#rL1hIqka z-~t|uTHB85vHpxRJLY;MtaZ-yd?`yHxyF5x?Yu_H-so9#*D~oCwz&`}Nfvjpx`wo- z?Jw0~n=?C|h}6*$>*WdXOC>8aYsCu2Npm~NE#+cYICp`W4_0if)slK?85ea{24sp) zxLx>5N&G~hax?j)AX#vgGgrNWKZYAYUIwlp`LrM{kx#0Hb{_V~j!qCo`!M%^)(bER 
zL^iAKrL|7NvFH#N%m>|@cUWdGlk`JraQ&Mx`utLba0y8hnKht?OVO!0aXFd`)%$4;KUgk!2ZZa=z>C? z^`9_a9Q_T-&ifhf574Ma|7ERTPxL}I-p|Sv2hF1CmeJDJXwF`XUkoP-q)DGHP?P$3qPwUTv-+`PFVDXX56 zRa7R%d!vHr9W~2U2#4Bm12v^g3@2~anr~?BxnB)^I_7kdS?+S^M>xG@m+wZ-m*PuG zN`gf3t_pulMQqu^Kq7)Q-3cR8a` z$3%&^R#>%wGE?g|{CvO^F!f?Y%&79SuOQSW4fpp;lB$IcoiJ63USM*tng6WA_a``kNx+KyHBd92^Y8K?T3lnh>vUxZlCijN%r409DLM_*nAiF z%I&SPZ$Gk6RWJtO{o{>P=dX{JZ=Ur%+z8e5Bcel64i)SA0_I9ni zLkI}Q_EVB;$3a#+kuEvJpbn*zDZtsfOd_sM^N(eNt6GcF1}psMnk{c`elTKOGY1)B zi3}gVUjl;mBeM7;G_v4?_^-D^YE+xuZcwA6l^1I}_wNa55YhH4bl2C^Xq>>X@(><+ z=i_^G#!SXx^kuClwBG7Bvel>N4|M{HZj zrawSL16U8Mc3;}Z9h|(?wza(~eWx8lklynu{ZDYn>Mrpxbl8H^jU`*`6~AK? zNgHX&XXh8!V!IXImb7kLf~*fGnqBkW$54x-<{@ht$7R2EsFU0JGGc0e!LVy)kW`<` z|6w{WTl5-JXN+10e}NxFEW`Ek#Vf|cn^$klP?1rw8=E<;2CXhxff1^{pU_^!VKKZB zOU#d;^Zn`vcX9dKxav`726$NQj1U(jE?}X;*SCz9bgp|K8XCQBRPeA@TL6IvpUOM$ z=2vS9t@tf%WDn6TD6tYE+`2^}Na*>s zt%0aNtwvri5om$GfqB?l$3k=X>zPA(R_?2l-=9i7zDu52s}on~pb|zlQl`k4>nSb*$mqd z17chH*GcMU{MDb0qgh-(uu1uSZ=2>EQWiyteBc?XUPcg->r(3>$jv{lzCE3sy%?DN*kBK;n#3vKzr zAyd7qMXs8Ufrc~p!;$`Xi3x)pwpO9#xAHS)ooIti=!Gk>(m2=rLKmoW=f1 zW|VVtHWd196Vvb<0&V-48V6v)RKDCx4hQxg!Od%T#nIr-6DvNS#z<6>{r=6`h8z~1C80T)ndGINZ1O%cA_HOkFE@-#Wpp-H*P&?wSLm1K3T|t||eU zBY^v8E+_lOmLaYrE&ARdC^!LCmg2deJ7}M^`3UUqP2m94%JWv~5u0%DJohZ`36hrK z_6}grod;K3Mbi)m{Qd#Jub4fK1D@g5bGWdbp}ej6d#b&63=rzNrtl7*GbyF5(8joscYu!P{9WEpaAMYfDq7eW5 zyfGZbeewAhbwpIWyEU*v^2AZsbtTBB8{ZNTAh_6##Ya`6% za+CLP8NY>Q{J>MUS0M3k0%z)jd&(G20B*=*N()G6>?(H4$>mt{XGjwAUap<2H*0#U zjSx?Kkb}nM)|cG&Opu)d$ZVPJt;g$TLFnX8>)+L=FV_@3cSGiW2p&9W@ z9+!R2;u!K@lRKz|T37Y}2i@L{RS#sq+WWa>_7joiq-1jO#P5D2oc>g6W4*E*hfx=M z-GDJ=xCOB`WM~&2-p&SdZ@3azLT%YpFZTndzg_Lnh1~z!UTgB>+J=~%&=3-Y7q^Z_Yi7O zT>Yi`7z8V_*Pd4jTWVqW2H{`s4}acEh(G*bdI3ySGEqC^Il}_|O1}(aZX^63azn0K z@k#$GTji@J;LuAbKfhX9JE|xuytw>HG!^$8Q#hia-KtI}&I~K6z3od5L>Fy7cfy3J zC#2_7j@Bq7BTPuFKW$!CrRP(h*KyJwT*L2+AyO7S$lTl7vL~1-g47T43BgO8S3Qr< zPfvy(qXk6%((XgQ$lHoTa}kz(bG-z3{4&|p2YsK4?ThzA--)<8*OS$v_A@l7gqrdW 
z^aIRSc8f+b`Eu#eyni=q*zV3%$sHF;agy?Xtpwny85tSs<(hI~FL%dJWg1D|%?3ku z*ph6CMoglUcXGLdIL1kq8icn&}(NpY;DH+N>u8q*WWW_aG3VKW*nA z=H^llch@{Y@HE9^^Z!q!!I69rOYFKjM#+db0snD7;Nxo0E6PnBLt5hT&>u&Kl)USh zNKU?rAM$ z++|dc=LXmDR2scnX^{xUPGC&~7%+GKMG|P~oHV%9Aqn_UIys1`SI4?fspC%0Vo=f*4-z^@zPjzTW`v6jW_V+JtOj!iBN9PWi8(|OUbjBH{r@8X(9tb$jj zQw?$#qcl)i&UO$SJAM`VuH*NIu#$xqQw)u;M#$(LJ=LGqOr;7-P8(Ur#l}O~*rA_g zI>iE?UXxn7H4fQPgY!s4Z8*!-Ov9!}hWi-N(-+?WAx~0t-TC78dzA|CIu)r4ywaWm ziJ5C#S?ai#Wo()`W}hmYoxdn4$5xFW!(UyQX&U;!*2)uoOIc&C%I3eLlfod%%H83| z<;^>yG?2Mqt;5E2P`=eu?(Sdk44gVFRpm=MJ^C$8S`b@p5JV=WG{WqVnsY+hY^_&H zj%G7wp`3;bs;Pz8rG%Z08%Ih=vGTx`iL_tRAaa#j1$?*P2RN+s{GmY2i?s3k6a%XG z4FlDq8+v%^==$HbV=6<=r>#Iq?X5kUOe(vHLxuZGkU^@Lmpl)hidWcXd5>ze4dCkd zUIvxe0Kx2&lK>ltb#4`aRrrrxiYO(fu1(3;ZMefGZPaj!SD5TC6^M<+nfUdCu1YCt zO1m@^N{^mpJ_=#wTJ8S<5>;ZISZMlnrGczJ! zO(Jw9UD|OlFq&aMG*eFzv(WnKwON$XvQ;U&+BUP3RYEE1tW{_`#_d}3UfMRgr=HLz zqn)$rG%eqD!}-icJ5!nM^mkzyv(jryjm`!FHya9#>NqT=*A!F5McU&{eynd>>bZ(7 zeQ+?`nScFrkRtzGMfcr4inFElYTtDX5g2E2wr8Sz#L_J zqF60@|7nRdztP;Gg>OtQYZ2L|Jt9F3y=NsImiPt;GNY!Fxl|U|q;utZVM1C8sz2+= zIrIa|5vQtUV0MYH@j&+}QgQxCvmDRC*zEP3*yowS8>;PyUEVQ&`v$a;wrz9W@85fc} z(b^o~Z6A|t64Je063d0~N)vE(3sB6QfJ-}GWh0}@pg$_)p>epo)b9kKqE$2q4|(CTroKvxKX9Fg5N7YLjmEXF9#4q zU~>`Od!PM+1pCarYj~7~%+d!v3~)68C#t3NR|r3u%<;!9x-g_(lOXlO0WcXa zqIEBdf#Cq?`d@8-!`5`csR1rK6UR!S`T~h|A!23&^0(hROI*h@c*1X%ypKNv9BBU= zF<@P22xNmDaaJUY!~HWnfTbwss5(w6+Ml z@0*Ne3*O!ARsSe1-rf9Ug%0YyfcQVwS6G3RHU3=4?Cs#Yx>%Jn65V&$rK&zw* zoS@YzAT*UsLur{}rNdmhQG0aFGNl@D(=`xJ>&fJQR8$l&rYM;6rllz2P+bn9g+=%b zhgk-0HH;lSBf^zScRN^+p6Ucv-n#`vTDfZR_o~_NB%uF8B*7Z|Fil7r0Kive)Ok7* zPSgN^t0`PN4@?zwDB9wy0yBEs?uUbm%V|TbS&e&rVx2Ewsa$G6adZCZE&@G3yiY3d z^5}0gc+f*c0>}DEblYk2MnH>M-Pya@Xcf+YJn9m5VKYmg$mlI)sPZhUSisj|%Xp`a z5w(4eobJIxDhFjUgoDBQ1g$Wz5JOKJr=^$epv5_7U}20&_=Ufpvww9E-@ZTp^IK=! zK59SIjOxyeX>WGa>`x{ANXB%Q8%{s`an5}I-kiRI@!S-(&DR8@7|dBEb@5mgLGr86 zL1yrurJePg88Pjb@NJ!hFlN)PhVT$&Is4kZjf-;}+F|f(c6P#&@z?K1QmmxUSMx

E(9+I$(BP{`t0(Eu(kAZ%LqKgAA8FXZVVq!mixotZq)-y|X$ zGyHq$vkU3vT=#$f;;i!07DVuf?Zi|YcaGvZpM0t2w4Y-6#CRnJn#*)?z!(s|(M`z} zWce-ZV~(*-c;IC-DQrp7V#%SR6U_})*Z@^b)5}0Jqg_^7Jh@tCSff+S=`;B_i`1y! zQqJ%etD(Ij=P$RE9Jc7MPnl>cdfqR6X64wO4?`zksS_~sCN*=7FO1nXN}xA%=FU%| z@LRJSETBhk!{2+%;f^{gB$E~=4%NL$j zzER8TuykV|Y{V@fvbcL6q(DuSzydM?WADbhF!^xbktI_9ajC*Fx z6q>H))GzjLLkTMIES~oJ2>N}Yjw|^P4o78ZAwA!5=5`+(t_sKG-2-~g=~2n!h+H<7 zcPBIJ^9LI2fGYl18LrK%9(x{utK7ydPdbz_ZX$e~g%!6YswjfJp@~9AL!MeS|PjzfYa}49as8_t#{kOI4RSulqie z4{ysT0%j3cVoNI+lE1d2_qEql3Ug_aS8hUncHf{}BsOjdFuyD!%9sZs20-4EA@Vbq zxI{|=02JA|cF7g<{*@F?_+FG@Bh(X2VBdHoVuIr0uOy>>WF~*e5aJLS`^#Ih;5PE@ zDuK?(2=DEN-U~Tk=g~&cf9k^}(i{*AbWwVF3dg0Z2B61OBSP@W$YPB@a?%KyR8A`I ztsfQ)_Ri!y!Q(IVLvTMm?7MAsdfOG-N`D9_I-AfRw3p`cyG|UrGS`X3uRl8*B*-hR zz*9C>|1ORv^T^=Jr@fb6q13WATS%ud6yk)a(iX>1cNgkvEYvu2-8MMS@8hok)m<}Y6u6o{AnRT?e^w$aUP<}I5E9X)_>?WIDJXCh)5z=Q8|b)= z>=35r>s+MNfG@t$&3_t6`vkD~u0V?iRL6vgaS&QzOy8n9a1ey~71eO&kaNH~lsVE& z!@AU7G$lzu9W#=FSo?|YGc&E>`EVzn<0lL^VbLAf@1ot&qjUT^Um4y zu6xa-Ac1b@Lj%+Ns?jm-kVSn?T-C)f>!l; z`j$530{Y8JKUcRy$SsiLc{}zq9PJP`DG2KR7oQJyAr+QqYN-@$a@lknQ1nFzq4YnvnUXY0>!aG+cbXc86?pJZCUs?bG&r0kxaW zT%ml*(2(if)gQOxMqicY$dn533~NU(4whp|#TC0XbG&ELMT|!^p=TDOhKg;Mz}gP= zal2Dm8Z!{jiD17!3)W}3cg^3B(RfITiT^#R04h&o-(}a8KdA{QOg9i(TnFw( z`iS@eMx${WW=r6%R2f(kf?y^j4{#wGhA!m3-&A10X(zUKoJzd95{s?Q`xBY-?IZYc z7L3b)PimiGUFJ49$1>Xs$*BMvR|}v3d1uOeaZ80ot6q^mCs_*Ui3zHf7pzL6q1?;q z1qwapU2Bu6=9D@o~!*6VwqKm&0PITDd=pP(Mmq*l9HdP>oAlK zRu$HHDwI;u_iI*3mv1?9RO6=B+moZ%&1#@V=qI^m(X;Qn+Ga*`%66ID-wZ3}YOQ~B zTQnT~Ds?u`n9V2EAHdz1hK*+5+&>BDi#lzC&@||itvzmG+~GPP@4gQ1Z!d+q%^HH2 zipVO2&%UU~GS-h8er}H5Bi|^DEEhINzmS&Nu`~GU0Bl4|G-DVSP9}kh86gU?k_xD` z-fMukfUeoT!Dqpsd2gnBIj=q=!p@$`s(^t-ErW9~!zB-`0oCVShS zi6UyMt!UFLFg~5mm|6vSzZ@|}+iTmf(y4iFSe{y_(JT3(!Zym>kVuc8yGY86DDwk; zZ!xK>mAyns10(n+Z?S@~SxX~-iE&!?%Ol;u-|f-d#U4=UMPf*Vyj)I>|3NKJjh~pb z9+_qRL^`_^#Y`Fg+r>z+Djdj)Z1#A$g(jQ5WZ9cyVvh72^-yQELRICcb^a1nG5TY# 
z_gWzSSGH02wraUXRl}T%#gtPRbC8q2Loo-2nYzI&$HO5v`e}4h%yzTo{`JXfx{s!pm*SyGw4tT>s(AO6^rqIneBH&a6+Ul(jeyb@BXnIjO{$8+9U$HhI=8{-&e8dlZ4Uech7tuZoVra7jGe7A{uUb6LkC=82Rmq z-fGj{q$YY#IVjT@#okA50IpXLn<7e&6|SVE74ugpD&4^2PC2=%Kq%w%Jr;TDl+X^n zv-bNt4?n!f^GV?-9z%qX>>UvPySYS*4jFdOEtrwq_uMFY2M>~S6l(OY$oZdqehU=v%sjr| z`Fz-jB!2DR$n=ITB8~qIPzhrofm*jMcxQyScAfJ+ccry|s`a?d(=~1B_;(0~CzOpb zxoKYZS+L~s8E|HBFrY^>d~kOen48G}i{m+jc@i)=^l|wsFTZp}8I@wj7y32>&D<;a zSTXHhjKxG|&t#)WqApOAN3G(7ZGF?4f87g80u*z+O#nOVF&MQ0#KHY1}uK=o9 zJnb9;`sWPiulC0$c;6-ET0C;nR1Qyt)(EUs4!`+c3gH2ddrGf~8N+L>jG{vi0JtmR zh^K{yLSexC(F4)V0$1qom)RcIwQRYnlHMj^N2uT4PC|)#z zOAc&z1lwh`jkBZ%%?7Q#&}UW|l>oh4`cl3F5bJyT1Sj$vH$MZ$%R#lIeIoW-!AO5) znvf<>{KG>z%|N{0YlWhp_X83{gRn`Ln5`-&)o{?!7YGR7EZJd^qH2v3w%@KfHG$bh zK0h=2T}%hJBAY}*(FI^R*dLQ5T9<$SN)?Igta=`aV)%fY#%^W_?fr*x_^&f=(v*tr zMhYDpeJ~Tj{`^)5g*uFgPNK#Y4+Lt8Qk|VAfqs!(&+1m zKa`WDIvW2}a0?X^X(R2bYyJIBtT|LW8vL*ocPrxK8=hWDmrc#?a`%hg|Ir>VsKepC z473L=k*Hqp|B&^b(Qr0U+jc?_1c@?;9-YyHQ9?-c8g+C=i*EET(HSKXy+-tI^iD8( ziQXkfiypo6UH7xT^{%)4u`H8cNoMS8@AEtkV1eRzf*w#o?r~|59G-BT;X0Wx|2t42 zJ`7!H50KIyB#EVa(-Up;Ek(kCA4s^acYa<5P7#Vt-@AkELi~EXT}ieyX4ucy^%KZF zD{ziarJjw^p*VPN(F1kLrnl%wWjTMNXr<-%1$m>q|3OdwujR>qp;}4EJz;c*0`UI) z4sAlvEpKgq0}4VNUiZ*x_M})?@u4YFD?R}`99T55ww+vR+r9$gu*V{61STDR&&3!Y zBmEHI-^D{_I;P;wQVG%R9kQVfimM4d{=nUN?v zokX$d0J{qf`pcb?;P&!qUR~WDoOfjv zsj_CgsRwWViY2>6dcFzV``tg@y$Ct)6C&I37Wtp3GI-3Z?6gTC7d5mt_cM=mG74KsVi)QR=~S%Wv}?a0%bpQHzaBM|+9|j2q^6 zPgfw1D>pEmGaN$q^O6i-PDY$C?gBAX|Iw%;0rNCx7iCPmqNd6VA0zN$)?|e7%+OmmP#NYP1DvH36%T>vxg=C_*SuvMIvw$kD- zRSnGmO$}$IgPN3naj3D|rf%i7DtOp#467|pY*5UVNw{9uSx>W3frzTi6vKt0?V-tE z-Ank80R#YjqfdIg-U`j6GnO^BcTSU{-u`;BB+1NuR)`w%cKWiBkb)IsP}S(}+dFF~pN=6@Y#0A6tk+vxar;~rr6p>E8&)2p z^dCvS#Ar!lhDnHB!FQ7Q^=tOG{xul2yqr<_3Jfn_Zsa$W=o#Wst>K-%E|w?atL5XU z%s}0#l>QoPte$v_g(&6JJ$eiDfNS}KYjkr>&NRwhKg%^}|vZIwIRsT^Z+GJ#}`;$p6~*ZmPmxQA(kivP8HS^wM8V`UnxIFn6}jekkV49VBoLxEfX*8hmi@GEeS ziT~$?ThE%rX(Z8o~EZi8CqfrZOm7j8~dyFi;eLn$)jfX>wM9aQ{gK{qQ9E; 
z-r4F_N+10QMON`XutV-s!NVi$T2=I9J@1t8m6SZnf0vosHfHKf3ipnuQ)PW#B6zl` z`iXA#&6`Nqi$ALB^kWq=-){dDKv;>quh6)aC`_0>n#5S)kLMwFL$LJgh&0@2!DQqn z$m3O~YAGu?vtxn_tZ4@PV-cTYM+Nxw14jyCilLm4VPbowk7d9e4rq8sTD~#Y`wjHB zrGCVPpfp?zGysrD?F0~pC8QYAnoK=_!S{etdz)N=lBjYs!ZOpmDcX$|Uk2hsy!j{R zt=spuIaVAMsaWp!ml1Yc6@_lk00hior4&rN8!DVo1!02D(1A{-{P?(~s{hixTxYYW z1ZpEiZj?iAQ=$si$mDRY0OR`i*kU%Lt8eAdKqOOL7~N$U0dlzL*}5?E`%LPHMBQ^vjZ5gp!o2__SI{Taha+uXfd_cY zu}5gtXjacPzox*EI62{W-uLveOh=x%`H4wBo%kZd8Nlz`EtVw3UjzU^Xn+f_!Z5u} z;zkQkXy0DTfJ`&wUC@jh8Z&_fCz~VhHU`OT#+Tx0g_bGaxIj*}Momw`5;Me`v){r2 zHSB*fy^qL)UFoCL&dX4Y9^fxpYf8V|qIf-|#iEs5(#!3)!~b!s`Cn|;he_%7hA*9$ z10)0#l75$Ox4%yKV@2cx*b&ya!;`)3cUilJz{to7RNjpBI;&hrSS7UIiKu->la^;8 z+dYo@E-JO!%&A#TI`;3glO<>2|o$1;)K)Xd` zY8|GvOQCs8HpFf&r0L|xyhciE`&t9eqII1#yl-#$Dt>+iL;k$IU}n|!7@;&6bH1Fy z;x>^OYr+b=lGEad7Sgk zoP>~*VdsygZoSv$Y>T=hW)Hp0_C^wk1Si1a2If?7ZufJ^jArh&6BEvi^46d0)IB4? zuO@}(rel}qhDN{0w`BYq`P&%DS?kuj=6W-zK7gx_gT5eEJaem8G z#8PI@914k_S7EodlA<|jtsT}267p8m9D5~6rc$HHXdRFS;*8WzjCy9Plg{Um&R^k< zFDq=<_0LC;!l;FRl4{n{Be;3ENEc!0e_iEx3jsAa4MYzzr|3039ZJ)+Ht&cnqnisA z;iBTr*O8g=s9#6#%zIJK4S#^gBng)G=4s?pn*6|6w&uE}3tgZj|zcvoT)Q4J6el7*gnl3z$fj((&sNRdju;$_q#))D2t|Ynz$qLN& zz;&j(s1>j$P0u#Ix{K!E`vp|Q@;LcV9(`{a^ zn#{MG(GG1!o9@Wl$(}p*c8&^COyv`=T~C=Fg(U6i&&QdIvA~ZmfXj?Q@lUk*P$Tdg zglBWY1W_f1eAXvW3Kws|UKFqs`PT=MEgJ{6!MrP|?T(pMXIsH@>3ZUqh_gof^q(Vi zpWt2R?iP7>RHnBCmsVJv50S6sZ#@C|d)BBMS>UjK>E&S+RMC}u7kE7HA&yg2zg4S( zhPB^iCr+zmkdY2iDe6GWJYOjND-G|K#6SnyCTrA%b@Y?|(g*}@ITY54TWmTN=)f?P zj~wz|Ibz^YnmY6P;zHzy3~la{2^ZgN3HI-$JLDY?-#;Dh3QHfZuz$L}9-%t=@)b-g zPAu?$d@13cPQVE#JkI2iW0pkPe-Z_9PjRCR+zYo=)hcq& zZoh~#Pv)+n&rp*QGe5B@L?eB9@4vH0tOw!ECrlHFt?`xCh~p26(FyNNu;NUfvPV$w zjv*){E4&xiJg*?hgoo>%N8$AW+tiyXbYpWbFt=w*sc07J^P~Wv45vZb?bXRwuEpPW zeLKus1bgFhKecd`b7|Tl;m50C){i`IZ<{uDQ3O0Zjy}FeOe`4*;lc|39@$e{aq`~h zpVk3G>(O6Y9+bBt$$lKnJmUOx`mrt-L9N@xV8YaAkI^TKHtau`F9EAwFpBt8F}w=M zzzZfY^CqcUIN>7pEn1XQ_X`XMhzWrdv>i8RqzyX(1c0+3=EBmGqV$P?K8d#NwtU++ zsJtsk$#gI0yK_izp`ujzXGbC~nz}vKO@+{T#FezqY_(QIu$b9s7G6A#So4cb?wS0F 
z4+y(;J49SuQkZ@)svhqZrziZEa33<`ko9`Nrzb5J+GTFA!B0r!!30*<{P-ytqqp7o zM|Ndfi5fNMq%uA#v@y~~7~rB~thLO(Yb2+lcjSnDe<}8S$4>53AxRjF+M=R&~mr^hhirE;Qd2-^^lqy9mdJ$W;>p z7i`JE??4e4qgVR+x<_L`QUBTo5cDbI`YVBXdb5{V1h;c8X}x;Wb$MQ!mL;$ikZA90 zmuPc5oC=gDyUppAYD5Z^b5V*G5xH8a5EFHCioj1F9gLErMyAyA^K?dGO%A}Ps{O+i z%?1$W5nbyxILovGGOp-{gUeT020wCh=j$D)a0sdC!C>V#-T(Jmr03!E%OTgqga(kF zKK=Xoi;y4q^u2s@WVVENXH6jyDVwU-{pAhmRB9)5h81vbN}F@-pCWn+6#<$kG$W&P zXzIOjo3cg~WNn$V-J*4EmmD`Vl$12-?P>0ufKmF25qOshuz8F6gEY1aCG$_X*UYWT zVW}+oK`^Wk_s+|0H6Ty+92jCJK4-iVVmCvUae{w;9`f9RO}COcRNtLh!`meSfYPvN zWTku6<(Zg9{z@zBnFDt5VSe}3Vb$d~=xm3-3X|CF(`ZhekT)%tX({wpLvEAVsKZ}l z3&=pP?owfeyn;9DXH@VkO*)qcGl&{(-|?Suf-Xi($l-c##&Ys@`+5j=bVJ7GukpzY zagUU?7U636viJ-a^-xr?|6Hvzse!J_207nSqXqSDaVI?}k?Fp+2hEU%Dk34qJ0m z<)Q4^@NR45tz)$A)x#59T>^#^y5%ndjJqx{FO}H1lDotR$Watuh!>6B8>jIinM!10(cAN>zpIW~s$(wV|R#tNN853C}@2F~u3@kU@ zxVwLFTSBchlkVQxI(z%r`6Zn*Ia``wBwsL?Do6%_2pF7mpubNgGF~pnzRT4|y34-< zd9aUM7Kl)On2koB9rG7zQ-VB7`txQ`e~oxpGi}$uYi8cMl^H~UrjIpi-Vmy+QIjktlrJ|ql^A*iBEBG<(W~{XC z^p%Tr6R$?7s-Pcr$_K$Vi$Mf;WqRWUgsvszmH{!Zz)m_z+T|S8d3sn@T0E$A7nxLp z77T+M_q7HE#`%wv@p^)a`~~u6VPtHozok)C9O@xDBKYGyOhhAqQm?o1X=b!i$->X& z=)nnYgAe)xuP1v^Re(W{>Kp7e?}(ZvKGN`<__a`?JHoYep%)0e0&hQwg2caO8phK+ z2b=}WU?AK2;qltNY$BzpmP}D~Qq)2jN3e@luJt*d+iP|y>otwR%%$J0X&BvY7R@#_ zc&9?8%97(kzgol8wUMg0KZXSQ%kUV%wtOtsZ+bVJdP2HXQnbn_H5KdB)1U=dBpguw z9DMq-OFX?|{Ph}bmedZAsqM)ZL}+Es2T(oFVU{itnc{r1^B zUmKD_$m(J#>9(nWXX|{l=U`40=>B$I-O^@^?UO8&enOsX8$q%bawa~s7%d?xyXiuF zvgo@GpM8)bkHBp!GeuJOhMF*QOYh*pzv@QBG%81{+vjnOo9W`7ipK0;`BN{o1qmv# z@cY8(dWjr|{?QUpz>I>w3JInNl(186d9MBmW5`$#NIk*Xa^9%SpQB0c^b$#q<5`PR z)r}i!?y++gdBEo;5BPb%fk(%WD6`_s|wh%XRK0Y2>OS$jl#eC*=}U-k+#B{DOg zh2vRzBiE9FR7D3{%8e_HHBRrqNWqAtX?)Nx%}9|SI562 zqxFN5WoVDjwo%KmRsMv^Zr-D+3NFs)N!P5VWq&IEJ~X7EMAdOAU>O3cAPF;vl6fcB`v$yYxF|XIa;<@Ul5h#%eaj|6cr4dnK7u$Zfp~Jz z9n4%{?&r<$8E*JVogNo<3(q6>e7D>P`ujdwy=qI_urdpMiwwSPx8~f{<+418E{>+< z%V>Y&JKpYK@KV_Red1suyM8TOlgq}B_eeB2Q$+WtdTOl5!azDy*Tm5a%eh+<6V>2% 
z*Mkg+_YzCW3vZD3_Q_7hjq?I971nWg?DaK4Zx$PzcmC2DzVXKkpWJeO%{ zs^%nd9hv1p@x{P99gz51>f4HxPslHdT8qXrKS~;fr8??kSm8$d^P3?#?>)E`fwqcz z8CPXEs`>TQkL;x^H{zkwNkzboQ(US7rs;thcEPq~K|_BtQHrp3t*XrMQOfdZDMEK= z)~*4BJUJa~=?)M_Jk@pTT5f$x^!#SWDLk&BJU%;Hrvus)fw}*buv)vgE=YXXmF1=9*?fR1JpsO4h zV460&%OO0DJ&mryqv zl_WEP46A~gQ)Es8;l!Yj$UU0YrZ~>M6Dk@_GWBm->8P9e2w9@ps_D1c?7pwueSOBJ z*-UC>WhIH#=aU){-U5?BsCO7Ul!+^ogT%2(|pEA2`rEYY6ql^=M80Fup z*8KQ?@+O?p_u<{+xpLolOe7Q&*gnU^5Ksv__6!XngwgY;|0jS_7>3bXso{Nm$^Z=X z>cz#`Yzoja92Aj)*L}GTTxUB6=|aM(@D`jGj*{jea96|t%sC&cF6%;2Hhx!^Ai%r# z{xe$HGy{ut_e{^E5W1Cjcl!Avo8Rm1LvjGn54tZAiIVZeLb&^se6uAl)h1peChq9J zESXR))tZFq7i1?LF4TD*r@xA%SHjXNeykJ5?q+9yAw ziA?@etVL;(nPQLUt)xH5IYq4d=FxqI=POou5}6QZ=V~}vx2`USzc$X3SnX|-e`WqA zGRAdnbou!YnE1boH%j)6oX8LDjiQ-ro#Twk3vL+|h}hzbrJDNejY`ce5${v_#Q?qj zL12Z#*9P3h-K;kN+(`OTdcNj$mCNR%&A8pJK^y-|*I9klySi_T9e0CSBSUl5RxIhd z%GwJowPxq)NwK?w8D`}v0=0r2qqMy_CYwi#z23J?=ImR9_d$!>%U)SaQQb4ChI+i8 zPQH~x-c(=}vO-+~U`pI7_NXiP2t=eLtAyFlPZ={Y{(34UEQ4nD(+R+{@{VHma4mB- zULt7#E<}0~PYW$IFyTgg6qR+k{|p7F3-jBWnybVBDx}#(krlY6q8}Y=IX6{993D2k zy4O&r&3;THVfgR3;G@Ob53<<1Sm4kPd6~^-b|UbyX&#k_IDRyTSeQY zd=n{GtS(a`1G!i9ha`n=YtII$3f4>_9RozCIHhY!u!=SMgJ7F1q=J0;c}PmKn2}=3 z3`DO8-sF$BHfifTs+opZAN3naxfGKMKYV`mdN?)QYW`$W!V}jgxw7{myshC;+$)3G z`pR$5mzI}}FrRn^J@K6V&d~Vo(;wc(XS~mXTTsi(%OpXee-4K4D(jlXo>-kr)Q<&@ zo*CVZ2Ar4Z*9|t8wuri)v>rZNO9H>P`K&E$Ue?lJrFW7BcG6BiGonJ47-@rMcu*58 zmU`A2snfk=(5WKuiJgu`z?mX+sv^j8vKj0+8IjmKtv6FfJz1IrlaHKm<1q{xl~I~( zQm0(lVwYtN)+jMX7Mp2bQ|ag)phtgHrkm+q4>os&k=Zz?$Nz_)O{f&#KTawLHG84_ zK09QfJ9>vrL0z0pO(krje5l{=jM7mnJtvHqV%Dlrie`hKws;BUPRW(ojb%HrScK7b zqGh4JlJ<;jS3wwArX)`9s< zsd0YnaOo>8B<5!RB7dEQ5({7e0G6$(lkH@#*E6OIpEQ(Ts)z9bxcXW(+-pzvo=+7r z6ps4wFYXZI4F(d;wIfP>Lf$r$OhmmgF@Mc9Lg4hDIl8#L?mUB}lhx82l)r1J zbDMQ@oNg3rZPkMowDB~X45dnR*kp5?6Sv26C?{pl%({OZp=(}8JvMr&*`V>_r4El` zGLWTy-6)D+o$c~*6gcOy9g|`os{gUyzna)4(&qn4Sgox0sEa$Jl7>_?WKuhaQ<|Ub zEFV+)2HxcJXl${A_>W%{ZzcYFEJJ3|OHn=sVKTN@(n;c%eIYBvzwsL)2wQSU$$Wx; 
zeYd;xHtD%PhpT4Kbsdc0Lv2LD=1F`6;2)`a2|fz9T{>Kkq@%uGaZ7}`MlE0FPP*j*dLRRB&5Tt+ z{Z2Tc;Ev<`^;*?H@@9C`nLkZT4HO3v7}6uk{+2Ur>ybHMWv6q+)spk&mNzq>>StJH z?Waq+E|_;P>F(VIWW5&RLc<2CQIzb>-pL?^e7(lW4ImUTK6q$r#O?55w0qHk4pl#& z>=Wa@Vv$my4PTKJ-VB6*w}S#;s9}1S#b{bqM++utc4o0 zVxRDj#_1~Lv?>yA$w>36tzihh)v1kp|9hA9;_}jXZ)-$pLURol|9n--D(y1IJv=0Y z;`SC3?t0NXy^K$8O@3=g9eL)ia~65{{*riqSNeNRrVEAK)f_+}dK;3=_|vie$5_&P z$!%VbEvXP;J+$1XkRA=9?l&^oI7F}Bcu{N*bhC|Q<%_3nGch$eG6KSICZ5T zp8c#&CpRwoVs%S8q=V`+t*Vz|a#1Et1CPn-g?Zmx!wtDmc>`>qD-@bCX0?poXf$rh z|5NBTCoAgnVs}%iwU90{tGd^P*Yj0wzhW$-L95mLHm-P6`rzaZWyq!XM1i(a;!3mr zG{7hVws&CQ_~w`zZ=p~Ui!t2VaTWV<)1QOougZ`6icWce*Y-ia<^iW zj2D=y7;q8H_JpMI|COLEImy@_H$0hybr4fVtYa0f~o)bS^WLJQc zJwj%35!U}mBp#xnv4@k*R#i6OKW>L*ON}!1dd1TozOyY8#{KHpBi9`N;;^qR!MU1$ z&D+mJG(_+Vw9uX}n~Gwny)f?JANYoQjLw<^0$^`-@Wz;_1L6k|eV$XTPwIKHmp9H3 zIIZ4SUuJpea4nB+xBZWdzDoJ%V<;SbK+Cy4ZKyE%^Y^F8Tr*q{0k-0UFFnORKa@$I z87bC6V#vzM2I9xL$A4*3OjLyV;SXb~~q=5H7dI8q6T2;U*B>)~Box!l+qMmsSV{S66C1#QQR(0tuRB0l< zdbeBXmjtY2fTtI{K+tta*}F0Y5w061-kFgHt-{L_oRz@qeyVMb`#crN^0!CXFUgT1 zUBuXSiy5LQ{q3cmd1*C?yMZAd;wKNqOWjU@1E?ev^)XuWnDRQjGVoZn3x*scKa@Pa z-N(sl^X6gF*DJidBj0cgV5w)N8LT!wQfhjG&Z{q-4flH8suLp0y2*WkE zSm<_?0CcaZ|1i12`TQc--b5^YLqJ(is1Ap-;M8Kf=KxHxn2NR50^@7P$M18u8Vcg0 zUTKat3DoQUxj!`DI!L{{F*)B1!%hbv6Tv7pp;4OH>Aj!ebf$($MDXqweDxnF;CD^@ zq_6Ip%j$DJ38D9mMqR7_6e}tzy3ekFtJI=F5svVty!%#~H#xVm(^x8ddEzKuX8K&n z3CRwpslD|)W+5;(>5Dw?OAM|uk$9S(kJ*SUB5FuJ?Y5cbr3Pr4lWp%!)GD|S2IiYG zMTBw)So<0_tDA7o@qJg@VUMzgeAikA6Cy^|BIbo+z6h>0G8iM4{SB%sTxoR9icmq0 zWT{pf(F)}!e0ACZV6i04DlhjAQSgCwOfd^HpIvGJ_MvJZjz>eY^3)$%~2RnZhoyaykl@dwzJ+ji<7@_!s~F85$t zlJXunKoyEOdcy!fEi|Lvdk6;8()o!N!bbJ@kde~UW;JjJN86TYP4VZ~y5SC`bp@lU z8L;apY}F0qYbR_n3hRSV-FQ<4YAaEv!Wv67^RTHvpFPgRg?GZ3BT zkn$TT2*R`zuzbrNHsFKG#4}qcruP@yu>O)lCs@VwP8l&V()0A2PMF)rOq_3?j|Y(A12Q{hj*(1{}q?8ci5lY?=`Z(?gR3jeO?e zZsw@g7kj@U2+4D9vxhL7Xu4l`YuC)%(-jz+;nUWrTG_H}WtCB;-n(Tbnfcl~KcU{9V(*Uub9|JL*kM7ynb+*tzPEi`IhITwt0GPK0f%j!Qq`OQ&2m@qy!IN;4lF&2$V6I+<$o_m3 
z$n<(`1Kv+M0cd^pP_DF+-pVt%;Y)$ey0?G4m;(GPzPg)Wm>;cee7;*J2+lVIvJiAKfVdbmxLGl$YUoFOpYvhPC){Oz zJPyAh^5#|2V=sEvBfEQW0oh!uNdUFZR{V#ncsRdo$@Zkg&#?^4z>@q zH1K#Iq4^@=t07pvaA8KheAmdh7$#ocr)p|+hK9>9P-iUMI9(783tkF!qfuiBd zX6RjtJZ4}X{`U6~YY?hFNPHic6z|xLY0lsj_o{he&CDshu#Is=dbHjHVzcpTeeDlm z!V%+Z4|wl;`F!(F#my zd~%W;Qum`wVAIKXsa-NrW@g@8EBLc4nSCuR#-cZG0Gw+5Gf8k)q<|%rqk~&LsIy!1p4O%Yzk_%0Htf^vXg}Fny6ghvk4E)Io^tW1yM-QoZbPKL>Li z@B_fK=r2D<{VE9n5?=@R2qhrqb*RJJLQpW@>M&$34g>lu-lhnX#_;JE7dOT^ZScv9 z#S`})IUEEkj$Mt2N<74`RqzXh_igPxAjV5MuVe0?js5jVj({2z^~?h#jENhWKMCco zVeK)g_dr{%0t&=K=GRH?2uomt6v(GzsP+>riho*9F8mDVR-6Wi&(Dv8aJc9_T--WG zP`IqX%7>={Q_eSI&mLZ#yw=l-c(Xz~8Un!gNe)zDwaF!F8K5;4Sl*mT3a0{}#Gv-= zU&kLTceRAXc8S0o19%%`k81Xmvn1Px5QP_BSEm(0Jv`L{Ud#n1fS=IK3@3(UubYr1IR9cm#SYLs(R+;QoK(#;oB3;2TM)4MoRztO!GgZ z(8as|nQNGdY7XEJp8-Ra>5->th8Tf+EE9c_I+RtUa{7Xg$orZ}=_gv4C;Q*(h(NpF zFA+pXHw*~iv};a45cw;)u4I9C*Pq9#RgVhQTJQd)>e9VdU16omE2RKnFEvmFX)^}g z==^)&@CS+~{L^qef7u|Jh=3MWN*~X+2B89NH557?j$?$gJN_Mavl_Q1c3v`$t@vwv zzud;bEq_=qc4Na=&^@9;#*S)((;f`_t+xUn_tsrDKad+2_C?H!hUQH@Uuvs#J|DI^ z-_ICvEQRmsNrtuU$^r@?`g$Ikt^p%NleZ6@b*zJB%(ROvcwY4|pLM(C_L+cCw#C@j zVCrj20xD3f#i|zxDB5NSeMF-Nsb>V`itz^4;=0P!YsapwE$YDSFGK^D+tU0ElB(cW zuZ6hNh6_STppt=4W85pSa^ zlL-^d=;Vt&$4KZ{HM#Dl2Rf*UPj}0`%kk>1J*s*aU({9Q?ncMv$9^r$p!GPF?Cw0C2-2b?QsleyK%}bs9 zTq*CDFG0D;4+;yLoN}e&%pc9v6S%Q0DZdl~b)gxvh!0#yM?q}L8PmoOMqwMJrPSS+ z1SJCNzirtGgo*~fhzn9OBn>p9?oM>ItLCPP_^qz&io7RV6Dboav2*dK%>4pz5$U}7 zrUC`zj*;Gc=2pochsxp|60DMWtsEmghPhT^^<<(`_3~mSGKlw)Y?G;>iOr^Zm_U&%xAlUu=R01&VwbeCmgkvSVdW_Y=Y&FQ4!?dIns3jL5N?Gm6%YD$NRA{pDWx zX%xkWD#P1+XUkt6XjyPt-ZLy!hw|7lo5{x;*(EPj3=K+P*ZUN;5j9yWr84Z}Y4Ni* z$KFUJ)VyWWb^9+~MYbWUM4YQ0Zv|$ssJ6xCKWylTkk#&9l6~q-#J<2w_{R^dsty!` z8~!y_B9@_RsAY)pJYsG7F75S{F43y9wHV?|x*@S?97 zw*^?<0B|tf9?#B=k)`&=(C$7@0rtW$B#0ur8njqPyjZ7UQ|!=FZ*;*$k1AUu>V)i? 
z5CBG=goz+@M=(mC{+0&MuHbK*TT{4xbcw+&KYK{w5i#k_5!*sCm|5~b&d1fHHco)V>-_XF5v|2@ zngdpe4B9+qnbOlK_&7rdHqPqWemLTrKzl`O4^Cq|Xh*@Pk%;e}JEVF3#m2__vJL^h z0K%Rtu@^ko-!P35Yc3rdaZ=3=q-mE3PDtbb%^3k0fr4u(K-|mN+W-7yd(}7(mpQ!g zE4|$eHiTqz?d_*%xV?|2jhb1*7mRG;B z$GgMFs{-C|B=N|#`#p#=J3DTSCCBmSD{40AI4rz&l8RH!q?`$)*OZGf&$*(W;WqB& zp%QXEP7YoY_XZsPZZ9Fv8SP6+-K*En$v7#7*GkbP9?Z_9O4s3XYMDb~St+qlo zV&?(4__G-QecV&fVMl{krXlL)02ZEJ^JbK=9pR%;WQK~{DKH5>AiAcbFU@&@Z{LZWTkhN%`r&<=CarvxC)NnFy zd){Vb0(FuK2_Svb?3n{!!%pBfI(Dl3z`o62rk;bby1KgHB>X?)OJ$fFrI@H_wOKc| z&&5Icgyz=01TQb|ceCy=6xzcw>#fdcCSbF-Bu@rmw-Ej56# z5EpT{(FtSbdaA=-6k46=@)Ne~vY%p243hNC`;2nRKG|5~yuUq8I&_a><^@dxh9(fj z!_^i}jK%Nd3t-%nGAO(}3+HL|dq}iC?6A>Pw_`wlaUtJjTCD;-e=ux-iXcLb1HXO^NqC zcf%1~pj8%U58v&x=!Y`x;ER^>CaMGO`=ry(p%c77>ffC+)8q6X_mXw-55<{D<8Kj}N506d)q!#O=_VEI_PT;Vkq8myZHPGP8i z9q65x?)e!g>}PzrHMEKz0ajuLn_`W|u&k zi-Zg7RRNKl0KGTg-p$}c+U?lM;~ai&Y|=fksbO4ct%~d3WpSSSK^Hi@+jk{WV3@Lu zK)CwAS6XobmACsN;pGsadXbJu*DG^BrBOA$&7HE*rHtPQ1AHI2Z_Y1tyP?UiZ-^Qd zh-3}HR$;?By3AwAaDsf%vS7(@F3|u$?>1IT9_(VTnG2?TB6t^y{i4x~mmyu2xYq3H zW4mh-Tfv~eT$)Evp&$~*g{nd)Mj^M#k##E+r6CVb_e)B?9B<>zTF%jl1MRB68d>ZP zQT*+eW?_OuYKK1Fyv0`Xh(YgVkJ79;Y1rVHH=D zCagS0N~GU?o1#uD)H6r1hKgRI?g&avS?tKEbaGo#4$YvCC&hupFEo+`o4LqZD>?~H zu}G0v5Guu|{7i9DEjrQEtLRs!xFD&0l6$n=dU>HKToS1QjM+87wl|z&2IjT05DkWA zGho?GiaE(#Z{t zZORz%x>xbD_Vw>_+QgquHleC+wYOQ#ZX7@*;eA@J8ipYmV-K+;b z%?wj(B<8f?^HuoR|9GXE}=RXDx;% zjuy&K-NA~$vA-wajBj5(rngQ>y+TW3Faca-h*}n{w^(TWTvBk6!*wFkG#iiK2h0O; z?sHj-{it_{Io<{w*RW~WU3Ai3%52iUKzfaxjvXP-{z%ilW+OxIV&-Dhnl^wj5&blo zh>1rkX1Gwe)JxkCfckw!B;CgSeAP4bFa0)KyF!8ssR^q_ml+M)Oc&ajkM33Pb#r$2 z{pN>zNPXl!y2UIN_0&pjXQ1{gi!u#|@#G6Gs3T?;CnN3kv8f0yU31O+&)f&gx$+cEIY!x=5LWy zu@7J3uhGR^b0%|4^&P@K40B*X6L5=5x_e4Pv0D&m47~85YX%VC)fkrv6oO!9mC5fLT%Hf)9G|TO)%|CW!*FJ7T=|TUF54iVr2f9;25gO;Ecx8;j%;k>(nmmvz__ z){=w3H=hCb>*~>+GFJ3PtsB)49}rDUOM5G;|7t8tLdP9q-0sH`aNCNWKd}ZjIB^LH z?M0;nacBY?F7_$tJ zPUVOrh+c`!s7(_EP;7K#mIHnzGZ%=S=Qnw^ND!Sq(b4J|Js(TNRL@g%E=J@tg08VX 
zB3(H->DG+IQV7>l_!O_xD}S2Ee=hW@7>)G8i4yu<>tbfcONd|lW9h@Z!UrLZ9F}3t za5aEomLxi;OC3(MopaEG17UweL&p{XeF7m_HCHQYXmbHLbc-C&y?q!Wt~4{nqjPbZ z))Pmpb&uv>gXf30lc-)0xtN&vUglHhuf=8l@}NpxeYYY=``cIItxw?|3{$93HYdFr zq1fTmXZ!+(@gO?2ghHGb2Ng`%IBcf3yeF}j$Uh@T-Qky(rGmSnz7@>&6H!8k`{@=HC` zJLgP*sgk~aTomD61szuM4@Zd>BGXy8>N3kz0P-?oA*{>@EQM5-;WFyWAk%;uGV#WmTEkE`6)o;?;yE5^Z%edQ=PFFh_iUD(Cz>1x2-S;#FFQb5@ z;8e9L+#;d|8-ybgMfF)(#X+g4gXU*|N65hIX~4P&1<~XwyrW?^8bJ5ye^2&0Tz2^9 zMskS!=0>#0*;nO8tTeFDLku{D_YT)$vy&UKL!|Fec1#r`^nunve#})KjC>P_Q;6h* zd8>TPvHAfzI^^v^K?veP{A(Xk?RQABTNQLC5}@MqdlyinF38vd&ZG;HM$!AF_A6~O z+b<3+{9(WW1ckZ5A8@};$dk3SwQZj2PH*Ig)ANnA^2#tg{PH=PXNB>|I|`JY(?cP7 zkQZ`9FOVd6!9=b4XMEXgs2_$IVw4~X_L%0`>cRv10O#x8OHe+L@Ib=NuJN)rzHGFM<51J{}HugdaHce;dLFMJ4SbMzCj|v0lk%N3f0bpZF6ka%w4boaZA1A%iUy8znfznWrPm! zzaa@o83&>hm9g%mzMWqcgFsszq<|ajZ4iuIgeM=XN2C+Fr-2h~S&nWnS+8*Nh-Lg4+@{(4Z85qq-=-(hggMklRr68H6%=dV8)P;D6(3Bg6S%2%IQm!* zemTE9AP_&mADyn0mLERP8?ftPCt0;UB&6tG9`Ix%!j;?(lq_A90JW z+E%}g(kQKAUsEoX$i*V4!g#3LoO;*J;feD>_(+kh#YPk~5S0bmeu>CwHqS6}0dF zEPRDC(P)@b`L$x^^^ zsV3>gJ}1?9b?Xb2q4iw&IL7Ni-IOrzD0Ln+DO;+|!U8u6Dv(x5YTWy^9+s zRHlfZ<~S)Q*o?h-oono$ZH(Shsjva|8LC-MOowvTQg0{sthWT0%!i5xU;iWPp!b-_?uW*tuCnlGN~XIf`i)-bCR5InT4jHO2}(~TFbA9 zJ{!>`63AURCD~0^pa*Xv?1HE3`--e|*y*98d}XXR-|mNa zzB>*;Hv}IR1zA8Fenf8@g+<<#@VlG(%bt~}^;snrX&19u0<)Y(6S@{?ntgrOT;uoC z>s)_q?ZRJ4PUni$@vS24P8vnFCq-F${J7ZzAod%&-V}aqya$$!4H4{xYMS3#s02OK@Nhd$w z{M-A%5W=fTU4td8%0<@K@pAEm*bk_o{H#j+SWdcB*HZOiK(;xSrwp~7RtE{N8%e>} zffz1CPfnNPQTztT;B(w8j{O_FNrKU8hz)Y#64DtLgYzvRG%}FSCRp@DZ;Q7=J2CwK zW9zNLqHLfxT$K`(knRv92Px@BrArVP8isC$?ohhBLs}(=?w+ARx@#zr?uPw-|8@Nb zd;j~86HexuchpH-b5$FlP6w66UDVI;Y??@vr1XecVSDs zM?c$TuhfbPGzujKo{7NxGokwQI@0wAcwaMnpo9BE0Js%>wcWY|NU(C=@zB7%PYsI6 z$6svhjOCo1513F}SU4Z8=pQaCZLDMy9-5hl?Ydl;Fbdz%T^+5`v9p8-vz&+!i(4DC zdkpiO&~Ke=fHZcl?ahT)Fu5R#t=|UK4rAsF~HWQuBv0`6)AO{wux=JGEv*B=XI zZqJ11m5Uxdawta=uO*WGgoFoE%VZs>S#OSkr|#~4^eG)1UtC!b9(9p){2Lm1pEMGv zaZaxcuqZ)|Bc%GI_lY)8Y{f%J5N~}$O8zIwFJrO%3r!;}R=PR;z)j(G$ldsJ({R@s 
zczun@Lvn%k3*@j7Nwm~*BMsdZhyG}LVW?Le-rA?e5~?nHw;*ppylp#~A1%=@;E&Yx z*G+P&wvnkdO#`}eCHVObHRBr8p`Ni+T&~cGg@6bEJs=FCvOb1xWhh2*7j6a+kG{ZU=N_Kte4G4QFABd=(mg)n<@O8{iP7>w8ea^{%_2` zu)0XW`DE@$unJsUayjDsvg;~~-F8j@jBc`0Bm49rve^jWiFlMt^utvMn4O#>#aYUSeS(yS`)m zYti|i@#Y~ecRDSN)bHU!t5`8zs?F=duaQH}5v1Q=TwDy+)?VM->@C%*thL$ue+6m& zn~cQs0!}hh2g$M?WH*>+{))xcjw1U2=YdTij64%10#15RyPsNhJzZnF((W^OH#>HH zeu!o$ZeL+KbpR=C(u0dt^8GJjkOa~O03;b z0`OSnY(+Z@eTi)!zV&jPznz{9C|Vz!`+N>C8Lq-hXwOpdYdD z%ih$4))~)mnwY<~9(>IuZUI)%9QoTP$)@n0W_c)jvaiW)TfhS6$Y>0KR;J6D#an|> zrA^s)S%j~s;2{#SJXBI;v-K56XKZUQygcj96{W*AXP`LQWUkSap`$E)&uudasAf*SCdgihoPg% zXhE(v-VV$MEq1|J^zS`WZ>OtN$-OIH%=`pIIS3PCtHHkgeJ6Y(_*dBZ+=?G z<+4g8!+o$}t!9Bv=kGON)g-v?8n!aFz0%Bc4;+lDoczXHT#V%X+cR^B0NH*eij)S1 zr(&Vf#VI52h7?MQWnD@YjLY-Vp8CV-xj#+K0`dV zp{=vU4rG1xdru{$@o{GRKn`|_a4(eC*4!W!ed%q70PS|2bH1NZyBz~|HUTFudzzyL zXXE07$(d!BhJK1XoN5{ywOd1cTwwdG^Whhd-P@*u$V=>6Ax(KCf~ zqc0UGXG88r6{I{RQK|!O!zp;$#ic2K8f`I%llF%5<@L37N-;{wFO8LIo|jqKAR-Kw z(|CW97ocyOgjpF3dlBJrLmZDp(k?hnB^vIds8nODKK|ya_J)SIyzdR8hLbIqyy)=? 
zyDHKr%9v)C37+`O%)YYGJPj9gBkALHM$McZ+59t#mL*mjHPlf)9)RSbXmp*)(9Li2 zG#AzK*X@eO{y4{36EFYS*U{5o*&(y*!epYUL&)jY-cIBV#3pAr+5zb@vQD5QyvH9UiX&&`TMr!jr&nKUIkdG( z4kc>F$-_Z$&GG4B(+uJjjp>#JDp>#GCeQy@3VtW@aW&ihX z_QRAAxGb3uyY46_FJ>q;{N;wm}l-7_S9Ov zu9T-K{Z3BbshBOVGtDDI<@nqW&Ew)zB)ptJ>AP~K<_wAPIsMDSUk{h%-tn-)Hdkhrz40le zpMBDB-QPZVrjz&76mnAcp`9`#R8S&%+CJDDaIsH{vl&sY%V{Rj%tO0GlwYnx)da3K zJ^}|ST6WfeslO5vLEMnErv5J*7ML_#Ir z`uO%3pQH~zLS$OoU z+5PXQGlbI5FLmTa@=1gpC8wJrX{i+(uR0!!fr790ei?1$Z(dva^-A>3-x+WnqcSM5 zH=bip_@pa}7z;qQ62;BW3o{3uh}cc`T7CRYU&qfoxY`XjMH zn{F@k2WW8`aZ!O5{?OiK=^)mhLv+drwvTA-$=`oM7Yfe|NpvC)y#sU!^pD7lW5?uC zh;CNz-4Vm8zXirp&{S&z)uFwzhj+y6$ZE1IRDV#XKYW~;;Z;$;4xCG@u+M|d0xI1~ zkIJiLZOCfOS&oixjDbXqMseZ+9S~ebo zJ=PRQfq2lk`LcrQM1aw5(2P(d@Z%evMf@;tt(bsLEH0}#i7#MNPgj)Qkx8u}E&aPO zG|U^}PuCuCWUBqtXt!8jYdfzdSwzNTUS&Q)9z!YkE&JLhBse%eIk`y1rWDuro(51g zGSSn2Kie7O@qhGLsI@xld64^U)b%YbEv?FQ0KY=N1yLXq`~Nri*!Fj5ue5p=p&@km zVak9{I*}Nw6s87w{8EpKxSHnP74f^-e{*Cgr+S|-c4Tsvp-^^`P#jxY|&hDw;gALc_yahWaG4S2WKZ*o1hTQK-?d!J233gtO ziG;M4f@Au3=>Ny6|sW-5cfyA#qba7&ydSp5vQH8~K<~7^6O?ka^Q;2I|u7!!s4lT2rFO1PharL@F=k z%)eOlv^&w6&W%$lI5y-*y+d1W6sRLgWoqbl)0D9v;ab1Kybu&<#g~!R+?wUOm=`Ui zBQ7uHY4joUB4iJj3xw2uVEts$b3euVXNtYz)Is{|eS^qXJXVXi&}#IMulk%@&W*ihOEi*`6vF+cLMEZ|+RW&UhhI<+i%{HtuOG6sAMD+c?6W;SQ!5*Cyj%iV7N z>aJ>aB{gS=(I{IqlrruSiW*o9cyqI7V*OR=`NnhfX;KEsYDqh3GF~IaF6q}S;7|ik zwm-ruXfTKLiDqJxGkznTyzN02k&af(SFCOj$@CZy^UCu`2;(q^Rd*}5lo{Lw4VyDZ zy_sqo4tLe>B|VcWPuG@J@9@wBE>-OI--7iz)RoyNrWbTt z6scCO=DE;dJGdE-GT*qA^<9|6*>?4Mi*n_{bLKbY>z)8y&%rXhx7a|UxfM-Oqa%G>N}UW87ZyBe@zrk(5Y zs(wNPq>mu!z3Tal8B_k1+`7a@_e1CP@49cjFOp`54y3BZYTe*0|h|=ZMJ0} z)sC8du`v}`9Rhuh%~_$HjM2h>&tn8?uX?9?j|p^C?oVW_Bw)a9Ibdx`;`*)?hu9_n z_*236rYtEjLg!(K-jgY=++L``ChwrDT_*4^8T*1+1VF}6Cn<*A z0jOzVuQR`XcUFr?tAFTznfFptqEtSZnu16QqKJAmp+41^bS`tEKXYJ4Y1iM#O z1G%dQshJGOM!n?~3iIX)6p?$aerS;XPg_nXyhk+H6w6XY3DEGwI0r!SVvCU9JqT$O zHKmww3A#Pgm&&G0{&&0LC0RV7=ju;prV;KIvBPw3U_!{uNV(B1 z^~FDP7eGy&Tlux3&15SdKj^WnX{!yP~4wduc*ud3Hi-0YUrw_o$wY 
z`2RK}8yooFth7t51bH{VId~VdxEd?Kp#lk3gP6#L<5dDTosEAIWuXLekN8m-VJ7Ev z3nQQV(~~3&t0xmYlM-aDe~@q;Dt{6XAlTiPFKl58j2XQSGdZn)(`U93mRnB^8{-w; ze{Aec|1LA%VJ4iZRc5>;qF}C+gXpL0A)Rmt_-(Tfoh0UfRdj&0UO;xQlU22}TEG}k z*F(s!xJCPYJ+mS;-3H64B>Mwl?nVcH1TMwE63e8v2`o)?kI&g@_IU-3c=S+kCOHO> z%7KEi`|0rUhmxlAlh1%NMV?b*8%8{i!6yaqK9X&8HkK4uyj#Dm7h9sW0{5w};6fs7b!VrcZ!#akEU^!=$HylZ4x9NxFG#fpx-}KDVtUbqmH!$E(y#K`hePk$5i2o8L6JX4BzQVi~<`@m;^z5`@_yQ zaumhO_29qAY449?5>OHIxE}!SlTeJ=?e0$O(aGtquYvg3L;xVe^h+Mu)2g2Sd%-qd z=}gh#RfZ7p&sP%9&X9oU1<8w3y+M`&ggio-urbxFYrSHo5Jz#*h|qMXsZCzRH~1Cr z4}rZL7mA%1aKT%gDA&4Z-cbm&+}ijgwi8^y6g$x?x?6rbdRG~QI^uhG`S6@ojw~Y6 z^yYMni;vHIturt!Gn367d9u+5b#zqSt*oi3snZ61L5}8!0>gvpZV6w}> zLUMQTtEc0e6xt@M$>QM*-bnQl<@mI;pZdE0Cu(f*7K3!?d_wRCm5V0hDotgKK}kvc#yid}6#(EyxV zM`Kl6&fd}OtPcYi;}7z$Jy`=`e|TVrx?CK&1y8d%xA%#0WARQXCLcct(s3aU*A;PA+FsIClDjgY)K~?4@W?1%$KHfi2E3m&x8w0ID|;G-B==WDPR`f4q#b zf(JqolsQJjd2)7h!g*xEeN!g0e{c6`fIFYMj+1Q&7n6C~%et+rbh{?gactdPZUaHQ z5{(C_AO{Yx8}Zp+2y z+*I5&~&mB;5ytjU^XCTJW%_BJo3z00$QgRYu{s5k@!8i+rKGr1*!OspY?$u-c< z)_F#wwH>U!QJ*uhV-X`5x_$-Ivl zm(^t&tt6>77{~RZ(p7sjpRV=ZEXm$948EAP{pZg1IeLQ!i9;X$ zTsIUg3DqU&mS{ouhOJE>AT4J=*rpM)PeJz+oAGsX*7;O@DxY;$-vphH>nogDJ`;b9 zsJ2@wQU#gQ3m#2Mk>>9-)}%*5y<0qs&}I-qAAh50)S=gRRZ0y#ZS&d26)!Julz96+ z6K~YJwTI)Awm*h+P#orziKd9)&Dw+)-Tu=+n)pJl=vNA-`Zr)%({ZoFYE6|hJ@T~l zM1{9U3!C7_2BiQ^&le%CzT#J}w$O)~0%4{6enK(Ys+`Dt$q(dxcSNAsySjKny#U_ zV*cIBckUk7$2r$@r(tEGS55Bx-;*{y5(x6;Vz8OGgwoCnN!9T4Wh1cF?W7c$t-{Ae zi1(H|G7;as$qGx}CA_^&K+?-9g{@bhj-JFr9`KO#C}Ega@4n1z5c#J6!W3zwx7lYr z`D@RLJGnu)?@LHB{9V>?>uriX5=6HJiaFjiXl#-HTt4x|W)H*S^WCqc@rmfNH7JBs zCE=|TOI8U>yof@$Cg*KD=^5wyxM55G1S!#QY)3i+W|q0+T0DNVf?NjFQxTRV5*<;M zmjn@OR(e4K)G+%rTr#9yg49y@r88~!P>Q39E~GVE_O*)^Zxp=`73O&redlDL1P9Z7 zu+I`@Qp?+S@Fcu*N<*F#8$gPB%IfOiE&rub&lmhPOjT zE&rP|_wR;uZ7Kaaj&hKY(3>?{8y4YhAnL2L0Mut`(M6m6lal z7Uol#Noewye8K=9#)GJa?>e)cKJu8B=^M&^@`k`46)A}K7GnxPTJcR`DfMl&{ff2s zjEhm^k9OIfRAWaQF)85r0>U1YE%?YC(2Bc^2>h-h3$CNOzkc|3<_bSIR00Nc;moY9 
znUo@|4NnZ@z?2m0^3^W*eF_r$MYPan7m~i)gml7Qlr7ejKfKv}+T0hl=!8HiN@Dmw zg&3vy@S3(8BS)^EJ7I_CiELe^QvfW@pM?$vW-K-gw~D4dL247n(wI4E`6#RRx=ZM} zkbYTzM3T1k!n|4GLEecVG4aS&Gzkx)4^|JY&j50rRB$MC6%d6UTPjh&9m|4Ae7&q4{y`tHCBqNH9 zN7@%RC?q6ewcvC%{eOGo{~J`kxA_RP2++H+&|f`JacZ8am>Q-`OfqSU_%r6Ni>&u# z?)W?$XNZfpuaBuXtPKf2-m_k4PBTBU0*6H#qZ8c~0NN*<$4|q+?AKR%MdNYu$TKG9 zvmPD<>=3hF5d)z<;sgn>V7T+h9xXIRtjmOsigQLFs)J6+U^Ulb$ST^j!>8g=b6AXI z0{~D0Jb>se9#6r4hSaBmY86RS{^aDxwO~nmm*0H|Onop{6A2sBh559px~3AB*-fuO zS>v8^9#!_k{HX8R>|me;l^|)(o%AS0&k~B*bP>3)W7drbN1kvqwt$Q zmp~6ZyRIe#W`BvNIkrF63<%h51rzKP5H(g^;3B(7gfVX^%`0g>v&Um}Q081rvU;U}wK$u6QL#-q@AWKVp|%-0UKYse1m>Cr z5ugwqH$w1|7;EW6hy)wVwue)SZ{QZugBfo!i;ZwMGh8BZhfh~Ws%mACxb%)spoRuh zRT(ZQ?HBNOyte8~TA1F|_(^zhCTpY@gdT$S(<&{NHAMO==O3X+?9XiPiib!^zp(qi z5lSWTznis!7dG3afeX+QfAg8iITN^D9Sj-Qi0$uB<`f|`(q{_=US*~lYecew8-5Lq z;*88sG>c_sXl|t1D+!9!5_pd&Ub`l-%n~d2KrcRwD(@m9aCpfKJKm~CQ^0;Y@Nd^T zj7_1lr!Cf^OqSM}g1dCiKY%L#F}ZVtLSe>M)?|c*nmJOngIeSptwp^zgvD)=nCPS$ zCZATCUyrIsJ`b5DWBxGwlh5Q8uc_&}*ZGKUIyE}c-x4JmQ3e$|X%frMwC-rKuRr*f zvDyCgRN(`a3Uw?#Gi6MZWNDq|(47?Pu`-BVn&!`U>OsX=O7G{gpf?nlyrz(LgyQIG z(yx36>%476wux`Ma6rrTRX19`7T#*93yXFeem@`MS+u~_QQKI-=3*|_wo-O5otwNN z#BJ>O?)GYM2ygO56c-y@An(NdbGk?^VGZJ|$!!(+>%U(UE|sv<8?23gr*O#k2x)K~ z%!i{nGr~+JrEm^(x*hIP9dG|~k7k1hiqvhIPW!xy_AK}v_mGUX?;W}h|KaRc>~~%c zD%)}#ZC<92EIsy+_eFMCS`!$?|0=;{1KYaq2K*7@>Y(A@WpxvhOS|&Szc<6O)~>w% zZCQ!WQf%f6Q5xJW&7TfHI_4RzvUvz(uiqAZr(Euc87Z}Z_4~Y>lyNbQvN8#|0OHy0 zbzsgs>=sSz?Wr6qJu&mHi^RyLn>#1_$NC@&rW9td&)vC>?Px`xvQSuE!bKQWrEQOL z)@pf8X_K!#Z+&W6FVvyapeTvB`K>Xm?hiU;r9I&W?;mh;oI@52*%@!ASS3^(aPmW= z1l)f2Qfus}`(}_BEt0CYo@&&t({AjA+U*WHV%nqDohSE^i}O+a`SKGp^W$^ky(9{} z$eSe-2F}p~P|6RW3I*lwf2fS^9r=V3TjbZ8{x^vt>EDEkKT+i48_+0rkpJP6cGGfa zQw-~Dyyc{RBt>ozJog>tj2uR|^LI%3=0^V$HE8CuJ^u+wyWjM)>xLu4vwbr@m0Z!y zqM(nJx#8YxzBwX;Ya>G?tsaj-p}2>~63je2$-=t>B3>RU)9?SI%oLY;_23gSNuwp= zkrnE7P0(f0-XeqZKl;jrWAAh=e>gN}fLCcattocYcC0a zjRGrRm)KK46Zmb5;|qm@(8zp)rx28E~=8WIVeMw0O76@{ox zi7fiVN#+!ydMD7%0CNsQ_hO1Hhi<+v 
zB@rkk;*}%)QeU8iyY35-eZaM`I*G|a;vL-r2$<5n77#e@bSf0yt^4mt-H6KRsoi3( z1zXk+Hls?i!HmzV%jutAz7O*Bz~4Di_?FAR+x5H<)#J_1x?+CiTthBEfsN^H6pUM= zzsm|g!#}GS?QLwt+uN2e3w^}}g!J0;* z!h{<4enO9!9h9Lg2yK*%u#ug?jK!XqVjT^e`Gy4N2|%Ny+&AJYkTN>grCGgLFM-Rq za{w@wNjVwBv{jN-K1YamZ_89)!uGX>65)d8_YDEC*O;DW zeKAEq&0O3kX3Mw4cD|Z5vonTD*yHxRZ%X)or<(s|_4Pv>%4;y%dvI}Q=gr|3SZd7AbUX7>@KKWbE+Bar7A+Er-^ zfRUcLcY?*Vp43Ir#@I&^U0H5c`yBkY8zar+0U!F-nb&x^^ppDr$6cz(`hdyfQnerk zsRhaC&`n!%|+{0mIhHqk^YrJJ7nP{y|?&2KuJ zCcRI#826DN^W-k-QZH>by);F?N_Xbf?DWHlo;a#Q>6>evXa=Jm%NP7HbViewbmR7v zfJ7D(`)%jo>nB%~6J60~m0P}!x^rY&Lowu*tyOIABb$+pjTAUTUQRaJhA}D!!wERp zWMA7w4$hK)q#J)BCnf&c*;t+TW(!Da4#ZVUO<3uTGmEp!Cz)85Gb%bP#b_%D8;zHt z>A zB^4!p(H0_2F36Mo@|%qKN4!96{msxgw+dGV1z~ZdF&R3=v}xm)|2X1R9&yIl(%Yp& zY2DbQ;2%QEr%CFajUAFi=oDdQ6P|jFEIshk6;HNFeGo-j2_pd(pRw|<`r&e;?qMgp zbZ~3>i=iHPeF*8a`Oh!<(xlTCE-UC!9Dl-o1!wXUr~;dvq(ev^q&!V~0{oMmN1?$i zxrE5ve@E-Q(lP_z}!lN!WZh1UqsT92ljphQR6v+*8pW>%$&hW$Z}owoWU{xZCZpi)l#)Ywm7!7YE3h{HG6?k zA~|TfA5Z=EISzVm7U)C{VhGkQG1kRM!m*lJU4p{`hWbBDuKT6Fyz2O5+4-&;042in zr05>Cj-leA%h8568@+sPcG~^&L$KlOKcV5~)(dy35cX- zc5tk3|0>cGZwjKoCw{ej>-&z+V#e5c25Zro;=?no9oqI*G7QWp4q2mCX~8G}XNGkL zzy|iwp~||BfMVvQ{wXL z(?^9|&C^H&6a`*;WTiX+LQzwF^{~b;b18>+KV}5A|H0-UFAvfAAWj{aHr^Q7@e#Pw zKi*!&f$g-IujcQsr0gYgp7<|reIM^7?_YZK*qjuspCsOynln0GDU0A9GM*g!Mf|?- z?vrsK-X+)NePAF*3(Gc1F5FUHX#5WxK zSCt0RZMLW6IuINM z;x+Aw_(MIzga#2bQb((;oA5*ok*}QsNo8rY)CklFzBrk2AnDW*M{`B*+eorrJ|SJo zx0DUEn+Ekkld|Hcg0T_^ZdhJjFt|S?J;=bEcPGwu3m0S$iBSd(j zXkW!GOI(HEJFnGG`1x5ctwD+OiO{=*os+yJ%#$y1yyIiZ0{$dGqwD2Wy?Up>!lFSS zvXB-RiJ~74SRoWaiJfyQGhA*B>W2__8R6uRg?R)ziLUUH*{v0YZuI{CiJOaNU1Zxn zA2on~h!O z?d)(jcZP}aKuabj{5GJ`?Tvo`ux3)}-M?kYoj5a0Y47V&M#i?$KgPWO5Bm@5s$@*A zZOH&)!V;9BL4AIF%Bh+nnNZ@o#4x%tqdXfsxeGn+-@&#Ij2~q3&ML4u^60b zt5Y38w!z2^S);vmIZ$p%v9nYysc<6Q8B0A1#khY`ygA)2(%a>pD%H?!O1N`591VN? 
zo-StnLNi4HD&if??0({h?)ioa=$mH@9+ zZ!`{@<(UFb@SOfi*~oq)lKgLQpMV-tCUsTewK7+t79nBe!LG7w9L>piULlhVRGdC1 z)E!?o2D@wQW9usN0SniSiEdEA$6iD>#$~cN?{Yn+QS*o?^n{ZhC{UT+Zr2_K2#_$R zSpC$v-7R~K76E(`TdbpjT{=6*ksyQZZwSE{$UFx^+}9}<<7!e?MS+$@ETQqIQGV}^ z;S4KI;-P1<--1ja5`zpMzb|}4er?>BF5M0AB8)Rlstw)jhssH1)J_6lvh>-DT}lK# zBb%zvQ%euH%Vdjj=D7(VmX+TgC~y(;!}F&NKSDm)_uB!FA@1ecf&%)!7%G#=VkK6) zau=pS@;+{*y8AvXfA{6xuRkQ7ThCS~?TluV@mdq6q@>ucw(Hxee`EWfrH?S(UL4cA zcfUSNagX@an2-2fizqni<$uDwJXkEz|Nms00k$A7U9MoO_no|?5f!N>jtL7pr{C=+ zoeGV`ubuj_ToGjOFb)gd$r<2;Itk*h%sG7=;X#Y+@bf9BS+5YTO0?br4u{Kvqw0+MEP!)J+s{ zPEN*A#B8gPHg(oMlg~Gbx3qa`*%c61#sj^NW!bzD7tV-k$bzco2<6})*~c`VY83T5 zUen7T+aU7gK#jz^Dsu!a$Xvy^x4T>?^n^tkY-_8yLA6X=S(ANpl>jW^+@xLpDO5DYKy?tGi!cO@BYp5k{?a6~gZJ zUVb)i5i$Eu%%kjY^j-BzBj_-uz)U6!B)nc*#iI<2Lkzc+z#_@qilc)4aD>LW1?Jnx z#iKJ*Y?01g+piVm6>{^-J}y;WNY?fTlo$Y;nDwsSso~SkzWkHd<9jEaZ z1%es(55~GFIF!%hg+AFLr%^Sh44CiL9mw>FAm3lXHsiVlIz($kw~mj2=A zYjt!j^TD`fo3e@xnqD%=MPB%<--Ys+?s+|0L zE>w}DKQ`^GxfFM}sE`S3Y9(r617@J==taomC7gZGY~K+mX#I!8xuLyG{qSRMcj38+ z?f(?Zzln|!{B!>ziTr7_OWiJ&1~6^gNnW0`&=zxG>C=76^jjF;7T9^3>uS$d&~CBC zKP$=L*3gSh9M0f$b@U}xv>Z+O+$LD>Dw2vbv@lRwmE3u?Z*70y9`zsQPkn|}tPV>U zN?mWkzxmAME|MH<%aq-l%5Us%##X<>?{v2>v_)Q@T7itz6kxUM-ulN^SUPu^5B4+8 zcA{Dwa^iX`?5}ya)r639hcRMbtac%f_@g^yeVY`vlttO;*q`tS0Sg~T{u7_*%E6Keu+5JE~)Z1E; zUEoCOJw!O|rtC2FzdxlvE<21DECb3d2nk0SAz6LG8W#6z2Q6 zW%144mTXca%A1(WL9;B%zj8W1#i3%FL~OT>l~P&)4$C8MG6EX&B~11OM7RRaB*{2Y6)ZL$W*!DZSN0kFTg zHCMD#D^wkeI088xgVQD@0x%o#mU2GWj1x=Bq&b)Clt4&2PN7WbXszv3lFqcXYPAQZ zh65je)t$v})q2VrCgpcCld+8w4v>%VJF@-4=kRtn)SG#{O^1(;0p;>C2OPIW1p=Ki zijh}BfBH0+J{7jiX)LMg!J|L7?sOe~3TY%DUJI!#p|2Rd6d26tMY_7X6I2%DUJri? 
ziOa(4%AO*nUW%9C4VSq}=-r{u!ZWmxg1g}t_{v3}Fuog1Op?V-{&ri@C{0uNIe{Hf zdOj%##B(sid3Q^A&&H~cPLTs`S?7~NRsqK!-(g1bCY-HGo}4jV4U9I^x1%J;DM!6k z_6aHIc?f@fLB~C(Agi~5tVyM$pdF=G^a(NHb>zp`w${areJ7xarw#IFj?pj9^h--X z`PMP}9^jbO3V}g|Gw~*0JJ3mOegMfpHmj0c)1qGs!WrTsj~8_6nigheEbhpI#RfsI zbLPF-N(*4=bG|pLGA)@r+S}1G<5a4eC*|_@k7QBb^ndxC4vN#e|4LkX9e&wG3aP)x z%mGy3w1tK_QJMfW->vZfo~@)FsZI|MY;r_>!arnpi%uk*l+%e~biX~g*A{`nFo zx;Ojh7d&FE-lIFXI^eQ@#2ufD@*YoPD!Cudfgq3S{Uf%+YRAUK-4(#p`SOG|O^oXOFUZlf@^kDO9$w=6R=YfgBfA=E0!tBd$BiFe2RSKV@r8JLpG0O zZzL2p<0Ei3%*560^vXBoQ90MFgtW$zf-P02+G?B-EkY|D*K}HuM{|raY)ucYgYinXa2&ntRsA9>UHs|ATobhzA2d5EKIM)ENc zgLbXf(j1o#2AxMtZqB#EZ{F((1lH7G2X?h*iTTFOt9$c!8PRVm9Sq!>nFzKzv&<^f zAUFNk>OrJ~GgiQ*3&xWvT?<)WV=Uvd>Hqz_PFqmo#Re=;TNK~-gSPd`MqYebydAG+RRLOXG)5z zAY>t<2KWtgQhmjj`PD>u(n--)FI<~3dY-dhT$l;hV(j|yp3AUxg@=*5(vIJYxKi=X zr^qU?^7X|jVV-%;w+~~68J9OhldLM3I*+zds zu6o*=hY0R`2RX!f<*-LhbDCSl$l3}5l);l>SP$BqZ5tA-ws;fjKDKlH7@YO!Do8>I zsz)x6MOWky30c62@@z=>SS@|jn;kiHY>Z&Uw2KDw!4Gv0bB2Ne%W%;-!-eld7bw?4 zkJ3M9(ca!y;2rv>R|O~p0fs^V=Bo3;^cNnW05bCqe^wdUD42!(chd&ku~L*uEg&pn z9g{NYoSv#C#kjv>w}OgotM`Y$?5*nwiC|=kYa$(2u$abE>lh|T1uDCcX8F*;kn2E=`owfHt@4fq>SBIU@Y z(p5fVowCQ$(O7G2oV2t1JYEIB5OVido#1Xm-Un@zz$f}nAe^XKbxpA9QYC;fZe94i zyXkQ{HfTW5mwXqftW(0D2#F&{9$$w&z2PM8c{1+5%UC$#$1HXI18%7Re`&zZW?)FM zxTr@02Cbs zeg3ITRB^3u%eI(K4H|z01{Y&I#4Ye&=!KuxGc8RjpD(`IG>*(I#Wp}P9_-piwj{Dw zxaCg^M!HtRw0`~4*Xa1}L;_@Y9d^FOmM4CLi*bwAX7l9P=P;mR#DfB1xg_E3)T|dS zWuYi12Fm$kAE!BEoPbDl5AdG}4p=w0CGtC%e$n|AhMq88o0O@NJq%OfOsw$AO(EkP`N7hST5BvCQ0beY#AI%oOA`O{E92y8*BS@}mmv_oLcEX)P9d*DtA2mz}yU_FXHe#o&PYc)rbx zz-H$?0y6QM@ijGzy;R$OARncFyoFA|5rnMZzZ`4ILj9X*9x5k7*LMUQ6(z?;Fcp7b zuOnzF0S!P!Ap4SY*&p%W-bHTt#h^x93CRu%FY0$LpF}tj_Jxhfx`dHP`q_Il2_yF{ zBMaI*#PumpP5|u7ciD)<_kPgjlD<<~bwGWt!P|LeZ(RspRq{bB$r8<4Stohq<^$-) z+0zHW+Hlx^KFRIs9Fm~On@vIN?@k10ya2@Ue%_1*Xi`Od@cDoBu?q>Mr)Lql5#ox} zW)WjhZY%Bw7hBRVZ*){aqe|w4al0j3;vxfz@w+7j{mmt=xNMzllgbK4Wr1x%YneRR zC{bI2LuTX_07varu&WW6pFN4MeO}TN zid*A#VO^}8Eq}DqW;Itu_f8?@eh45P(kiT#0eXgwC+Yqp>~!vx`T2Bg3O?G&!J)Ee 
zmq@>#|HtAm80YWb>V1V908^AHFITz$_n8l%7JZnLRlV2TV}1N-bn>d*$EQe51=ztH zoav%Pbo2|sa{=_@k85^^l@2Z5h}@3Z!W_TLM+z0QXh<9L$xpXBKJ zf5l4zU(v66ebeDjFbVxIlz|bWf(vkuLNH<6$J#_aB-Q6-H9fEX!&-=gm z>TX!XT3kFc`<%1)Z$m@-fMxLti}Zw4R9Ay!Ao3wq>V_k?VCUUL>ckiKX$iKcL~d!P z{=?{MR_6*uvj>ys4Ia?y=ee+(F8`5^9d5;m0BNgvVB-1Y!DBX5);x016) zBg#(7d+MdIf38qjo%y)K-m>&Qkf@u;Na(!2is@N`twViQckLc}4aHh~i;Q8NX(3KD z+|zpQ`~&#VbbBIwxKws!uT2FFDMis{fn~4D0r#1Dlk(~BSi+F8zCb5?N`ZLIydTP4 z>gukA^7|^za^9cG0^P zNx*V3v@OfHRA(VDp!W&6+Y3F+6ihOnxvl$Z#4=ClG(6030pe{2wjz3#>9(Z0=jHH> zZ*xmL^<WIK>*Oi*}}j+Aaf}vkzZBBay@6JPTbk*iyqhL-yx5Y&j4C5 zkviJP*-gWX0Vi18btWHuVY?nVOk;kZXChU4d&*rlLF(X<|+?C;l zI2l!=$yiF4&=QA!0Z|v%S1UJ_S=y+VqUaEuX--XkkHXaasx4uj{YHT+ljXS%9?4Jt zC09tLPHxzkVO^Cu^?arPSU)i%O^@UO1CLMF!QLuav%=;x&Ozhc;&LP5Jm-E*nWrq7 z9xFVLmD7^tZu=v@5Zaq3g3)9(S4L??2#nLD9 z(w)nQ3~=SxI%sF@f5lbjc6yl{)mm9$?4TYxj|tOxYs=r40ce?#7gL1172|_`9uy|c z41KtB`SLln?t-kcCLvMyDaWP-K5vuHzoT6Ma2}#Hf)W09QwghN+ks$QT2ky3H6+8= zvYCk*zK1hyD~0ua2pcH(7tDaCt&sO@q~1EfmUx@QY2>gMox-@s-w^Mm_ukJlkp#4q0*{1><)qA_8f{flN6Kpw`$rcSf#kK+UYRgJ`Zo#cgU z9g%)1h7h=)(Q{^e2l(uV3;lOcrc82AotSWiCVeFrjL2UN_hH;xnH>@4B6|a2qApU#0B}226SxRD||mE z{l}s>&rIg1QcA7?4CELsz@cJ3*|sz+zh9`ZPhCyf+Eq@oV$p=}>bUS%#(>u}5h;)E zF&J7Yuve@S%fKjV&||*GrKSwg9k@QJdNzh*v97583(Bu)-jYZp__?pNZD`m{eI2)pZ1A)1W2+Wl=LVtt~TopVtv)mrrnw|Egq0oJo)X%(z z&WCZ>Qvg-W6I!nUR!<5##H3gOL<7gefrRC-Jj01}i8Om_*-%lC#o3rjv@4g^CXsjS z`Ef{l_Hq0TR?CZn9fjr+&P_VcP+Sb)lbNg0TYL$zWfPGal3gZ$XvR1rvJ&V;9z8Ee zE}(l6ePo1P@1->f0#JS3$rsTDey}?dDiu3t6h%oo015`6kq*cPe8u9)cI6Ff;*>J8 zcNuTnv%g;?)GWq8SoP~fLuYH=YX%pJFU9{s5q~Y=aOzdV)IFq_m6}>+2GIF3w5O=N zpa7LPj&V>|O&sVW+ia{BVcrW*IR*z(v&3jE&S#r)au5<7D7~QJLaoitRI;ypRZB>= z(p=sOpEl+|g}U=7+t2_3amQ>Ns27toN3{Ekg~9@1-q_B55rvx<2fH%$9nC@w|Jv;0 z)`p_9*6!FND!rgz_oz}b40+5)-^dYAsQevYvsuU&G+s`xvAi)LdKaQ_LhxF$Khs&i zPh>EKXG+PGhR8;yM*U|l0cwEf$I#}e%m@)7dU4%qQ1@g{F@!?s#}Yp?)O8O{$jN9V z_9#=dIgs9Z;{IDm$D-U1dKX?q#;4e=&pz*-^AW>5auwz~3TYI7e+7^?{j7k+d`&U> zRAczX!fwbpsMcezq@f0c6;;$Hf<+b`5Cg0aavyVi42p8+KWrM+hzRopumRU7zto7U 
znf@K&Z~e{K;QZkJn_GZ#tX6n0&x~+m{%}fry5#-irK)amu^f~Tll_&SNv7wmbq&yi ztdeq+_}_8(TwN?g@K>o@7F_h;QHRR;zJSj5q6-!B#3dPZ2ez_e4hRS^>hw#0xVx$Y z!*Jno^q;Pd*KDWqF#uN1+qH{8!A`%M-e_{}dbj^6Wk^2V?85?^nTh`ak_P z*LvZ8U-D4lO8~a7gyj?6Kb2lY>L>3z10-|6)xIduiA7BY1oqM3CV+(oIZ`6*cMHr< zU7InJ2UkvCi#$lBe4o-w{&wLig6>)3!fL}v-T;ah590R9<%|D4mvO5tfEQgd&Z<)ET199)XA0f?c=^u+NU(is81lC|NvPNX!4yEV zyg19dNPC+N^DpP`)FTDfGQG2?g6X`agj#nxmIhDzV1M563f{AD>N?j9Hfl168S)?H z;d*V9iD$)t9g?U*h!eaujiW-bj9Yg+M`^qKLp0H+)&+)7cxVLc$!1?{@f3@w*m1eo z^*jue`o8uzqWScIPVI~SaCrIy=v}Soh zKJuQbM;z-Vo};_dLF8}ojKcOnBoOv!Cufz5X8{WDZ zG=Ytq8Y}p1M-$`(dALo&BV}=UTeM$p&vL+qw~Ll$i}5oJ^(Ysw64 z2|exm*-n?8{%XXvsIv37Svt;N*}^wZjbS&LV2NfHtYSiB!ki!Bcy{l#6{n3!jlfup zk~;qMpCAu82tVr$q{sgO1rdFex z;|W=4LW@UP=b}WnEO7VKKuLSu_X*6SobzgPj46EF(;0iR27E0bH#mvoC^q!3S=a$@ zAxHu%g(5nHAbY4Iz@Jx?`_1i>dcZejbtX_l&(7Iz`8toXp8r6g_=-|dCo z=z92-z{r#6&aI_Uwy5s$qDew!`P`7p)bG185XJBZdZU^Uui&CaOTjx+q=nCqE^{&9 zxn1bU+;e`86wQ(PA0XjMH#(r1rm5N_ssAamb8UW1rqC&P)C< z6~cNPrjAl`mGsK}^0_?ABOqhp+h?3$*wZ6X?Wb=B+5PR~##z49nyvhI3lYs1zaFA9 zk5l8?zn&T0|MVX0Xj5EOjMn)f#5--$5{2w*kHky<37Q9pyhIerB{k3bBI8ltgx>uU z;#DdTfyRdRzx-D95wkVgqu;NzIvA#rFZC9bm@md8nRT}}WUnAMtP32J?Ulqy!4upLSF%vXQpw#|HJk z@>K~gn3?*u217`oPqU_8(=w|q#K*7)Y@BHS7V=-eZ2k0D`KP1BUw#G~aKv3A^IWNc zD}j<>_KONd8BPXZO$Rtr~wLWI%qr=VKC0_ghO2^-j9s!541CgW%jm|e%!?UyF zg%~Z7$dlLnt0Da;@`(FG#S^bL>l=|2^wz<~v?pHf-3vXaPQJeFZ(cM)`;2?d(!<5% zJ|yg;rR%u%1lOg>owP%$4P1yVvJ0A6rFUuphj=YmWcD zrzH2IVu1d|*iL88C@*~+@7ni5gai1w^&RwxWe7G zv?;Ydu|9g9hhR+K5$x;oh}�`0H_ga*!w>X;GW<6SbmUi^%T zd z3i6Y*6RaZ1uZk=6zHaiT*=eqiT$Kw?;NxX-oJrToZJ%|mNf&Z_#yYg52th^!iSD9r zqIGgTTXtG)4S>{<6FUE%lxyV933a@KzvI*la>B%X$7%3``iL4DpzTHWiO+KE@TEgd z)}YcE0|(7U%uRuqDQ_#WGTh60?oTIZj1G<&J!N+@y@w7>6HbngZu*rp+YyF2VK|l) z`H;q2$fk9-VClNyBYlxBTAJ>P>7*|JB*sV6q*_U5FlQ~4y2@1eDo2}v=W?Sh!@%0^ zX*s`|D)Cm=vewpPbc~8ezCy~&Ss5cbP~Uufic?v%-V#GTaH+rguJL4KUdN%ypxZ85 zLxsm49yFfJbEKWV=}&FO+aT2@WZ-IXW#DNrW;Ox8F_Q3j@tjf_{dqT%!_7HLVW(`IB9bi_1I)t6-NIs; zjLEbI^Mg8Tz_=4zQEtcNL}?tyX--}1k+ryRopP8p8*4lxR%>p@QX$u5?HN~$Zv%?4 
zDvH)%H2GXcNrkgD*Bna#(T>{Z;}i7Ac{j_Nlq*E4#M}B+BE`!V_7;iBQL2}`L^{G( z;Gj5Ko@DkVTa&|H6B6U7RMX;NTal4$?h%4pQK}zKaKY~U(~$A<+&0`Yx(rgSlamfy zt)R+`goONN3nyP3=mme6lg)@UePQSt%^F=~=CskP!M`{oc{z}jizx@Q$*iWSj8U5I zuleap;U&N^7UVZanb8E)YNF$p^Kh7O%8}c3|3qup%_!k8eykMmCwR-hSVLz1PuNs? zsu-VLJ1*zu?Nrly9cB{y;r8jH8dk^NcA}$XdvD!Hz(QSo@ zIKOKH3;c>NEdA7FTm6;a66E8|DqH8@`0+>u7@Y~ zukm=xhS0yeDV;2lc2K>t$k_qkIKAOGjF`Q}+m%4?lcCGQJ#$rd)AsO^sx);{0YIJC z=>q}n#MSJe_IP{idAFNCuH#Rw?*|j?11n&U(}YG&YX%y(dhY14r#i((>lOQV*5ml%1SSjF*^#naLrI%HRU|&yy4W$bII$AQW(Y*WdtJoOk zdIhMW_QphH*I(;@#fcl=POJKiT5E_)sKrAYky@F0k%7 zcq4T7R}srAFeg>`4F;q^{>+Qv&h6kYkXF*ammYM5Tlug15<~5jPKMpMXmWcLH)%}Nx@=+pEC{_n$DWZSyJ%)#+&pxQKP2_cVOzA5C zoLKzxQmYDVY`5Qx9XL%?;O#A(eOPgaOExk)OGGxpJC%2A=Sx*mr($xXY$q*p#h0Md zdRSkmz$+PF|A#dJA+0TDlKxhlpGobTP5NtbQL!KOrLU$LVV?cGMbCV3Tku*E$PjXJ z&BE#6w%U}_HEep`BUQOja@)=0LV52Wlx(*Jel0At14`45hZBtYJ0iRv(PL!qR9d2C zzNXWRk0d!0@Tg6VN)ema7Ib_i*&0d|k-|tYiR7Jt`NWw&3=!Q8#$Ij#bhwo+>SHCV zX1uHZM@yR7|}YGi@Wgng8HP6ShxSTkuS%X_qlT3~1F4uib%n(p__ zefjdemBX{=iAe9X7qYHF|A;OjoeltMUYQyc^a=TzY{&Ehi&bCZ;fGrVX%z@V*BiVB9dqq?$De_4~uNXofqs%PN^Nh-f&6+ z=3frUyB@m!P~XUjI;pGa*QCG)xCp;)kI65NBBPq?Z|=DEzpIPKYviaFob!PwITH<_ zZWNGj=PMF@{bKEJa@;vbE*gG-@X|>+$R6I(HqdaVS{6M5Bga$O5050Tqveaf1|1+K zrJ|yujk|Vpwo?bhy~ndeLozd|ad2?nj#d4?E}8!}G}m1(|FyvG0Q}S=hhb4sN@ay! 
zMY%qJqYhMezAnrVh*5oAo=Z>cr2vg!6kF~d9o)AaQVu&TM6R|Up@~lc5e;kWm zY__4nW4QpsXJ6tsEGpy{v5@l6iP%$`%e#Y>p2SoR^dcx9sLe#n(3P3p&6fVq0jxi~ z5hz@RT0P{nV;CpdS|zR)|F34kl*C)UA`@fg%z3cG#4zMQC+ye50e@g9FFVVXRo|-F z;Tlis*?_0>!M#gWwnWcZbV;-S7JllZ$Y?xmIfxOiLN`lO$CbRd)`)LqY+q}$ZF=cw zIM#VozElqmpZ*2Fd0ZN|fmM&cWaEZ>%EL8SOOT{hVp_IJB#qcoJoiTu>rQA^+td3 zA-Nb2R1}M);ZOH7UWqCktN|gjjm7YLzGBoixjFNJ6EO!`-3V0~do#g$v4ikA(}$B~ zP)zAWu|~|CWRt4hz`Wwo(z~h9xdPR+`Ep9*1MR}k463T;I59vOZgBxuD$Db&GfIsj z_0M*xgP)MNVS6(*d}Op4&~^_-j(r{LfH8oZTvKWi)^ed-SEFGj1aK_u?=&48v@$$6 z*=zG;NgJE0dejncfikN^Ucvr5ooYLqawB5!rGkqs8XX>=|aB z#t(SQ+uX{ueQ2}eT0!$=o-U#p)T9NGW@gJ2WF^+?xB%8Kv**TcYNLkFN^Xuq|EHu3 z-fU5u28I|#QsGWlBri}M?x>xMgF($-r7i=wY`)okD}!*(e-|xUuKk%5DQ78G`xDGU zrhNplRCVE8I1aaLs2U~ruN%(b>8Ni$R%)uQsNekxQnSZWWVR(JlzAi*heD%-9xGHd`&_>sS^xQ^-|Qp^yyvRS>~z z+5d_;Zhz?Hn=0>-LBub{HmG@rsCAs6nCyciThp> z;DeoWBO-AkS*YC=uj3lfsd#^?|BGPt$5~*-HsVw^ONuWydVr(F#fKnP&+E6X+Y6}@ z^Jl&}%(GX)lT5+>TrdB)Y3v41iU#XESxWm={oVn*>-O^34}+MEpfRhdtUML^nJ^dh z4Bjdy2`l_BuR8G{qRnO^sc{CTUUG*!`R@4D1CRFxOcBZlF0FPkXAPCoD3?Kf6vjmVbm3WbmB z375Oi5dlk^jqq|1S6=Pi7|{Q@GRBJmaTABcIq%(b(ZDX&F~+JfWIqQDXxOnhiAMs=w%Me=7GzHcS-o&QKhz_K8_T?-%4v+=5OQ=+9MGU-a`v z&bilIW0`UsGfOFBxd!Gui9g5(LAn08vWb4KDw%oW;79>B-2W~DPZ2ojUKUg~IG_7S zu2mt}uNZnw*ggZKy@!7H_k;qm1Q+unu_3F;Z+@#n5}buqJ{ao9>4-%a1jEu^&`&8T za5S`C#YSv)+7#2}nid@0#7BVv9NseX1>4Tc`b@O7gYBY(Va4OyRWz zZ$eV2vnFw$ViW9k8Dynz!WA$d@hZDt%v+tHB!jPz7F& z(FRHa9E2sbgY~DvedDQ)KPaV%+vcmroxEY3j#k+di~YA1i#8}(28V{i`8Y3-p|6rN z45X3dY@6*WF_ZN~G%WVhI#P*y9lmmk0 zwYVJXyu8DPiD;(zyEU9zFU^ZUOHMEl1V_I09L9hMo`6_@rmAe5#F|B!!MRLzpi7f> zRpI0k8T$;*fOt3|{hOTm3?AEm*ZgC5OS-I@-1c!j--89Vk`8&rJq^1{BRd@FNW;!4 zWd0|@mQnwn8*0oFoJE7};l?aYW1Utm{<13PW3xAr9m|Yt+go{PwTLHS;8W^ftkd>?V2CBg)#2nS`q|LdO8(j2*P1Gqo~N@*3V0M3~&yP+#|C|^qY zTwYoIa90N6P9}cG#sYOG0MeRz`q1?-l^>Thpfp_(u1mSX2a}@RtXV*yFutNKz@y5i!~s@AILRL&+Z8Q z7i$_cbqSCRy^g*bp9o_;ZAqv?oENGCtc7m~UmOcdxGhlTsRwl>d)h?{_{XEuVmVjyH@yX=31hr_XM%pJ{Qk)(M#d3fKT&u-j%Y#+I|1k`iLA&1;!+|39Uo7jfUg 
zVb8G9{xz|`|Kk`s(f_xHfp4H9$tURXhIF->uKQMY3=}xB`>SvM?KQX(kNChefu+lK z;_lr~(^GqlXp_jU3zPP{?wPAx@B2-0fV?^J{36Vw1|$~c_7l20K>%^<1MdOHDcSo< z#>e`2pp*j50tX_v@}SiKoRo>oHv*FA7 zZ;Ra3NAc4pz^;6MDKQb6(|*(4k?N!n_%C=?{!?Vv6UEat$rtZtx9wqlwVZeTr+wl} z&;ZHI)KB26+jNH-8rU$Ul|#BixbmGs;hPo08sOd3izzf*kZ9hWu-2 z7D&$+I02V#w(A8~=4v7nahrLE4M$t$cFos4?-Z5eo>?4pN&zp5`?Q(JYtXmFZuH4F=s)#^o)3+b^1&vii2SnX+iVfV~c0m&6@e-0kmi zkPHX_y~!bM+IfKyQ7Tlc;bQDZ!RRg1coXPndMF*IgTLatsK5G>X^*fEVUrG49G4T}PW%$b-A z{dVfA^JzNP$@)6F_+`VxALjHWnlS+-W7f9|cc57azK|{-sJA#BoLAXjfy7?C)hu^L zE!!3(8kS)y0Pw~$$+%lZK9TpcY+6B>q_~r%t{=qa*hlI;RY7*ESz!bRwLjH7$Yv{h zdzp1=Y>cQz;28P~t*nOPN&U-(_|(ffW^OAP4o=3=a|C6)jSe^}BT1$z1c%Z!aFDE+5f4qc{xj2Bex?yi%K~UA(@bC@KUq zy;xmaTgwacY~6#eAzTs1$KQS*t6jZ&YVgb^Mr*P@9`)aNx@&XQo#6M?o8yx32)*6b z$Fmv}lW9{_rf9Ii$`o2NTd(EE&u9$ONc60+U&aVAEbH!SAPX1L>oHYRUDG5kSl%^L z$0J?zo1(mYn#Yjo+Ku~J+)A-AjZM5v!O)z{g=T0ER*^>^-M0z}lgO@VPQ^<(_kah?y?O?(_}e&C&QV zcPS^bn(ITCW2f$+K6~eXSArrQp)=amePQO*F7fW0S02FnM=9&x<)zvK5B*M{e(VWx zCPSM$itUsXBg-cCzl1nO?yYCFiE#}4dlb{eaSSrMr9m+KA6k|yFWhd(vu-WPJC{4L z%gJMUAg4JL@_(;RJ8#n@hMyf+iENGtc8atMKidiX4B-iHWf6K6wbpMf^g6+S4BgE0 z7S7Y%QA0e1M&Xo%jwJa0t_|a5mfY>Gosnd_(eAJld+5k@FSoMuWr=L_$pK#|ogt%R zVw0B@HNwWX7O#HH%`l9D=1h`;dBA8jW^u&#g|F>%#A2AA%k$UcN3iEz0g!FeaR;todrov)%U+^f<&0W9hz@F*m(`%tePEGV3-+ z$x`i0y#aCTld_vagZg8%R0AFK18YcrTsyh?V@6ZI=Z)?%dX&bcxWC#K)&+i%9)q9& znKXYBh!tJ55c=V^e1Y3#XWRqTNEuI410Ez2_rV48EpDy)=O1jh$ze;S9QnNUkQwDv zi#nvpOIVgcMs~+*e}ND~Ppp!kJX&S?Mrtdpc^AnmiUsknWsCoeyQd;!thLuG?EymHS|uK!I%@2^&R+|QJsb+5#qwt zi0zB=QdVwhI(qXDVpVfGzjgfu_myhZQ*&eYv)*W1!4!ufvO1pc%4!j^t@~w0Dfr1t zE&@`qaCn|vH}a!_up-*vO=PovTCgF^wplA7+_z}YTRrESe6l_oAwS2q_UxyYMi-_l zQbf|6(MB56MN21d2m*^~2#rb^AvW}@kYu&sXq~O#Oa)m#J*7Yp^PY%}A<6zpX|IGD z#ig`x7cNDh6y%-jaSaR2fwe}s6qb9AKmP6Q);Z*wrIE$XbU zAnX9dDvk^LdOE0Bf>gnk_frvxrGKE6dN`pa3^*eNLbV~4hQg*ph_7kyc%Py6h4;?Q z;V?7H?JiV5U#|c1kQ(&2dNwPd|NL-@wzX9Z`}|7A$L?Kd9VjO@18D1t+TyuBm6!1M zQ%<0vV$Xv2>fHZkK3=aHxUiq2pZT;?_?kQMbBKVC#55(@jPEZ2;stxWXEfXOGX!PU 
zpwVNn=VQP#7;fbGbd&wFdrQ+%wo(j^M?BNf88oyyBuKIn1qNP0qtN@^b7YqX?*=)T zpqPLDG#%2W_cVZRE-9=g(c!+QiBI>A!ph2OcfI4zWL+j^u2_}2HPwADz#XZjJBgOxiXPad9@J+xOR)3v0X7k_Dr-Vn_ zv!oBTG|W<8pM*0WsXwdVg=4zNsXu=5v0Del6r8rlb%RbFjDum*wQ)#qI)2nD<$J=b z+>YK#`3dKT9nA8a9_Iy zNN5SK-0BuQ2YFwSE{f}G4Ur43W6JTgSwMk&GPD7v9Zst_*WP@b5E~Mp}F_ziQv*)AKjv+8&BdBL1w-7f)tRR25>22Zr>xs`BD3Av$4)SjAdsD$s?~5L7 z6Iu>B`?#AAx@j*>gZ2W%()J5t6+`aKR=absPj|nWHsxTuGPPXvfIbOAPBjM>zfn<0 zDPYZn>w~u-D2E0T)Z%&*y~V|kg2sHzd%jL*mAQAM2((Ehl&=g|?rOcj5Uk5(Hb56r%YVB+6;N&meZ zn~Er$&ZE>`77no+*q+>-Cu;Ry?yU(Fr#QDKSPS4Z`?Z8qFxmj^Ud6_%7vzy|Am6sT z?0=1v1UW2|DyuAm8@?Jk)E0)nDLx+nx{&s6zeO~A*T?sM?c~==q^>H z77;#Rm{z-MnC<>;<-fTgO;E5x+1(XGNR#OqvDM`fv_dZNa#0(Pfis4urINC^Ubyd1 zY;oH98k*bPqPb$AFo(`nKiH#HSSOoT6-H-%XJ`tdXY@Xw3^OM8NNf0u_s}TubLAJy zp@;j+!!5b1ViqGTif)A>VU<~*9=Q@J%ye6``T_=KpEPiOi7338ig6K!V!n$10$~e@ z|KdYovqU~FCt+5;Mr!50R_SrqJ2FJW=qK@l&y}@Ul_IvGycjuoW$3)$WGr%vG-%zh zLbmzOoUB|$yZmeN=>6cA;=b*rCOG!Pn|(ZIG0Kfa-_Ut`Q7-G=pEj2Ma}05W-sa(7 zdzaH)AEUy{Rqhd&{Suf+pbcfOZPsD0WTBj2oWqjS;3v_r{Bt+fQ4#iiB5NjEdAVK< z)F>ue8G~Nl{_qE-0cRAcLz9@X5f(6AM-vNv6a3nY`Fx!+=%SAHbj&A^Mx2L|JRl=_ zq>EGb`t;&;k^7sHZ<1|_T*qz?zbn5>bfLSDdkUhMdy?=A#FDTTx8$748Qu6NE6C9L zxD!U;-_{{D;whD*WLnxQ2NUAG#SAoc@yFs` zqO^#~Gxs1@y?=GOnBjgQbMr@PudSd$ae*9G+2jZq z{gHnDQ-I(qI#IUOZpwOq*D-Dlc4*ouQ)Z5xOA!J2&WZ|Iil_FH-Q;=1{$3zW>aRF; zh=_^$@cCT_GtM~`OHsZ68%9I=Ug?;^2LhS(*=Hm$`Bze6_+c&@rG*bX9957wSzDE7 zZxOFu1Q{vy>a<)q`7svicH8$nx?sQB_Llz)$QQg~LluPGl(KVuG}9z9=5Gz;tJQds z8G1fxv&p4aq#GY)FsnxxC+BLD6a-V$d;JXC>iuY}g_>*D-=j91|}a^ zW$a#e1VA@`z&^fa`!>!Xj-I>~hM=qLGLUnOyq1X|p(6LW5s1CNWbDkR|4hA5-5q>W z$HVmoTT-UO%-$YD!r0OM?>fO)hTxs@Jh;{RbXxy9edZ)H zynwCERxURe;8kw&Gx8RKV#vhPF}-2UP)`3 zLhZu?;>d8-{}%1yDmVZ5*nD<1;JaNOR%!EB`mb{r-^WMcLp9OY`)v}SHdnrs@qazT z=>Z^Nb`*AIA8yu2RlKtbjk)u*Xn;BTWo9 zh4W~s(1C+s3{@b>A>y;(3SQsPbd>G`YeLxVQXh|(6?XW<#y?)Bi+Uz(+g1e zmoEjsTVGRy%W?^s2{ek1;xWs?)25^<6tmv3NB^Vz5U^5 zYfSIRQT&KT?=ecXOh2QT*nanFu!CH#sIKHsK09nwX5EqEP!UE_}rpMa7=0pI3(w!6-*IFLvBr5 
zWS>_w9tH`fI4T@LhcrmVCB~qALumH+^}I#a!q=X5ab^!~;HV-@U0({b%51WEaSB9J zi@A&09UqOp)bq9y493MtitS19D zUx!6SyJ6tFBpJo5D*a{loY{09xxk92?8bcdWVtHVxy=ln#ByVV(p8l+lrm_jr;~zQ zM?IL@`R%d}(rh@tfqiocfOFD6haga>IpxOes)&hbp^ z6Mq<|IuSYq(1o!zv&SLdVy!6nSE&^>F4AF{n^(iI2d$pZ0lG`cp2mqJ9&kK`N- zh0&F*@67jY#GesN!JbJ;RAg-jRnI@~gL1QMA|}pUwobsI51mG{Y*y+t9#+)gaYynX zk?mEo{;V}zwU=YZi172NtlM<>>Ghyi<{MK#t8(ByBdRa9N|tqjR}$_Kt%t-_HFJD7`;n?=1L}mq!=&Q0<9WlX?cHUsvX7=H+;NMiJ;v#Et@; zqz7g>m5n#JAdV+o8)q`%_h3<0EBVvhX%i+k&CjgwPfgH}a(4KEQn^n<^=*@ zA+D8zk$ytlFv2g1wAL+wb})T#LgjrxxJqF$*T^UGDsxY)GapNgJ=S4G62xu)A@jCl z67O%);goG&1+MXzv0$<**|9&t0)Y7HaWT`S!R51LEp}Aw7Lm1E!cax*MfKvZmX>$N z9O$`(tihVTOvV*Oc>nwJ^6fBQd@@}`dxf`l%2r-$Ey~3lQ?s&pwJu5CTPf1H`VL$J zE>!a{uto}{Oq8^(oB$iH<(u$<?^KSt1fpH-N0sa%L2#T-c>6Rg9K%0l#rY=4+&7gqB;v zN;wB*J#;aRQ~k89cV2M|y&TFjDiP7_Hlw8VQn6tr>dOyn6FF6Cez72Ig#`s9iHx~! zl{5JO-r~qb+{~Z7Qa2kFy0+jx;&@r}LQ4(sySB(^b4_)MbYcufg{fTfs!-Qfbxq;b$52=VeLcd zO5tre6Kmm>LNEvZ&IBEdTZ+Cv$2b#ebwMCen38%kw^@tSL)j;1n~lT7r*W;0IDS2)&pxfg&qH<~2)q`U#RZN2yxm;0_rB@^9yvOvJ$mJfiwm^# z9C0r~lfVZ--72FNn55I%?%FV+=2r0YnclWugzZaTS;B3C92)WR zKL00i+?8N@nk~$MgBg9>iyz#=oy+p^tp6X}-;TZm9q6y3UdPot&Heer!lq(8UFWm+ zwc&oRb60tbY452p@!pFe$6a|XkUX$K#pm&1fS&#&V%6v}_WXWLeAWMwXyI5&_9%Ox zwB<}82vKHurQ!2H=E`m#|06pjF?C||z{v|da=Qc)6-QBK@il`beLy4Q#&dxqpj-kH z&+M)72jJ<9(A=co7O^G&l4hSTH}!~8%Wyoe8F)iata4V@egBG_1K$%qio&l#R~FY& z;0X1}5Kd8Zg;l(?N|RV6iH0xZG@XvxexQ&Hv`6Qm6Z3c}ce4T>eJ=>P(Cm5UB&b4{ z|Dh5W{`@e+1>IL;g*ws32k}bhS=)tljkwHIC${^3`1>6?Gzu(VS6Xud2)#q-Qh{-e zLlk|=C+ctlW$Bu)Fz*V>h_iMSj;!5aN=6kP#!2iMfYQM;SpegOjz!bdc}>`I!t2)FJ_k?2~)n;mhqb-qE0{bV6em0IW+{8UBXS5f+8> zxajpOJ)k;T_uHv|bYqikhHj-74jA?^TomT{-@d}@KUD^#6;0ut5dgBslDnemAlO65 z5jtH@J8y6WBjjd3ygKc+VqqVhJdQKHI8JlO`$j6MyNIxlZ17xO6rDxg5#sJn&mK9s z=v4&m^S=o3Dew8IFTM-xcFJ{)&1P!Lg9ge>1j5u3b!(jFX*)RuelHlI%Vd!{&d)qX8^)W*vTSY>?!1;QwCK!Ly>h zlPPihbZ$RKDPidKKF*VNvcuT-e0MQ&ybWvC_v)K&S0~TBPDB&dS}J1)@9YenC;Its zup{K#`DY3nDHIu6)!hJSxASv;$UlL;#SbYI!ixs17B!@6x#8IE>M-w5oTFF5-84?B 
zkbL{?yah@+^xNKrIeb)^?3V&|FH$(ELPC74Hy1+AT>eBbi3BYG< zgu~xp4wiUfoBG8X!&w$x+HtI7zW_I_`QNj7TOv0|aMsON9ImHMDyX&BE2eux)TpzB zI0mUB65Q#Dd{!h)6d%)8QSJw|VMPljdU+p&%NrP->|z+b^I>ZCwcQR;h5kFtI%aQE z@T+X3I;RR%amxkYm1Tu4B{d};O0%4r>baG!qCcbaZrh27I87;eOC)n;$Abu)3J8w47ZHB zPgoVqL;MWd5-T2nGVs_nIBe!)d5#s;=wuc^+( ztBM7;k;6r*o-g~$9nsl7h5ubhHIaeROI^Xy_4bX@YKt2V$@f-z*#?Ejj4`Wx5=TMG zzp||*9PBb$R*3JTUDT6O<9aJlG}1u@l^jDdT25IAqsOEitV0r#+M>}e$r_5VB^7-w zsS#`*8FmxH@om@Xg)$`#O+)6@-p|4|hFR~eN$M{@IM&GOg}u#S?jkOMjQ!LZO>OydS36)Wr+nuQ!M+dnu|K~d}scC%hL|7=4Nh-)2*7* zPlyvvGfK`5FN0@UwG3%NLhaysMoay)QiOpi=hRv%;9g(;I{_LuuMdc^)nspB+D`B7 zNej2MHQw8Uzsv!A$*GjiL9@c|Fc)e9`$}6za=tQ?dqnTpg%q5uvZ)rI6z65R{;X1y z)h2W%GE1Z)LHr6qiOH94b$5b2^wXMfH}`(0B9HA5^tRhhRv&Ov&^+)>KF}vJ)xal- z%9vE&2@lId_8xMIW<%aV;ND9 zW8dHuB5zTwKFj<=-zMiZ6rDoH!mEz}3uPrhA{`97aE(D2it05OZH#R_tx*m%CWV^9 z^$(M1TnZ0F);QBUl1auiQb>A{Co@r$q%x4xSa^ov>Rny+~HejimC(Z4u0>Qr_ z5~+^gJ51n4tL(F!>AsgvpjOpNX8b5GIX#qf8n1EVe60t0vEGJNl}0>f@YX?rr8vyy zE`L#@BCzjscK_8qEro#N-9mOw-ZU*-Qb?;kNahrmBipcId$zS3%j9qtsnCEMS;;ob zpBu9C#p&K311=9bT^ozkk?H}#4nvaM)cHH_UNC}5;NcI+Bq-9PiZ`X_44L$X7VD0+ zu!6K3gcv2{<|b{VE5DDclohATtzw6qu}i$zp)#HCv+x4vett(UR2zZ6%w#w}%$8~{ z{tggN;WUclHXFdOEUc_#N6r*gNy-aqf|??-k@< zTd#MtDlm!L%(GF6c@w+2xn1)8{C^@W_>WHx82zo$eh>H={%JiIdmJ$Nc;d#` zv#q5WbkB=ROv5@u(H=0F+Qm~pkEq`TRlHGGZ+@!7MM7pj0BodKbN7!DgHOWjv$XEv z#H^sfjhVHPA6(Qt58IpEyZE%W*cL#T0<2#ySOgn!YVOX&lJ9*p1BkIePc#l2odZYB z)fbm1l%oF6XwBK5YFc*rL$)JII&Sda-ZUo~X-52b5)B=YgFXG_5%k?xI$q}`v)PO9 z`(xDwlGYXbj*$}U0-dhq0Eu}q&yQntr?L2h#K&kK%>nWq`Cp~se~AN3>Y@@igY~HQf(O{vaM3=3TU}X3JcL=8*?M$XwigWG^oKy*5!) 
zKsCMKtuB7x-`wEMpfZS{jAr*N=K3?i_8gI!XghshJ>^NrTD>)fbMk6EgT6OIcPSH6 zrVl1a)V>B$sso41J%R2WfDhVx5mM{hCQhnG9qa`t3$f*LX&`xbe_+~ctR-wH<}}Ll z#LneYz#tV4d1F~akTNNZ?orWGYSblr7na=Ocxmxe6Cf1#+Jbm6C9jd?POfvc?O4u> zs$tOut*E6rchMMZ$wdPRxxMLH;l~y~O^c@<38^v8;Mt9H>Z`L_hfr=uUP!xf4JiGb z#p@|}GqH|wxv#o6Pj(e?WU>T71bv)G0m~nlss_F@zq6X;E;E}}*NTB1NiJA)Gz&|i zl^PRQ9-S={oocTcSFF*XrJb0VEyvE1JXqW1=1&uvw^f>0%;MUUu5nGOqNaq4<<2+1 zeYmklo9klFshq}=ORF>~njtM>WQc8=JYB$gFP1J{>7it)u>opi@4)dfn?_I>o!05M zJ^2W+Si~D)e>BOI#;d43hBE1M{xgUwF3FelEITKHi-WqMsn=Y z45Xw2uqu-J+#lKG4V;psrKx=AsgosJY&wnVq2?o)oWt*0!{M$;r^#ryfwrX!14M4a zbldIs=h_(B8g1iM*_ZRAN1FDhyNC?H{#?^DLEOW$>a_6r5EDC70HyUBbtU)e1a?=5 zfysAs<^jP)-JY2_0(;da$Nayis`HKw+(F<>?hb>|EtoYL?H(eABW>Um%>ah;RF9yC z+vv?qZ<)peNbbaS!4POSLjtwc+S)S@lAyI5eH1@PA zgZMMxa(X5-d!p3wc#d1WuXMQ% z_4$l5{lts;Z?_0*!O_b9-5((wzH(-H!BBFb4(Z=$-9G*77DguW!q(ZnhH2g+MkZ62-zV_+)XoatP32TYdH3{3j>W;?QHA2GhWr95S#Zia5A8^S`l)Y zD~#5(yx|EaW~CYw9UJUnsVQG#*Vl?J*C}_p(Pgh%gKw=gSLf%6(k_vIK-|X?qIS39 zIJ6oad(fTza(@3UuPYBnk#^w2XVu~iuyxgz@(O41^9NCme7}^Lt|c$FIlh!c zMq~YSCZd~|Y7$29_3RC*VvFiC_`Vm|{TmAN@1rr^o5N;_qAolGbM~IoK+Sue>K1FK` zwD$lzy(%|moL%TE0_?It@W;R)J*OyKmUKiq{^*m>e;@UOD#Y5If@zX3GD=4An_wA{ zsZ+8c6fSVeHDQ)|8qA-fcr0t>;m}a_xaL5Pd7WxQ8Wf2GhZy=!A!%{@!faO zsJ$`T_^~o+-rv@(4{%xPjLwuFYD4KFwq5hhOV~Or9n!=u^SH+!OiUiHuWCmvI|!71 z7WBsX#xkUqYVv=?N$M7YW$)5A}k#5n$d~*$nN>)Am1gQnNUD%0@(ct!obwB;_au-2_{}UG31huTQFUlvZeLsx}t5Vi5;)9Yic-kZhi=WhSMivkZ zRV3spagobd2RK)zkK_il{F0cvM-8gvD2ry z#6r}1mU(eyWyUXO9ie%Vy4C;L`#$ae2$-Ln6S)=>S#_EcT3FFwS?#E}~T{|m->u|R+I**ZyI@Vt29&Jy=w4m@W%DE-SW4U>nj(j-4# zpMm8)JDb|-Vu>-rTiYvOg@$D%t+lWk*6xB=tZ51bZsr}VjjDx}h2I3A(}6w|={ znGlJ+E{v_^e#8{Wp+i{hx*Sks5qLF-*L!(@2oV{_Nk&rU4v(`re!~|E_>v2og{~&@ zL&z;dgz_^EbOC)k2sWVrzMc$x4_gCa%-_c=fs@JNTgO{sf;CK?3&`2qI(%vo8%&}xEu+%X63pVF^$)grc z|JPHqOL1;j6)veF#NLP&ECMw1ME7fHJir6#u8w`wl&RMHMe`q`nJ*`~M zR~l)u+^FMQZm^7+XV=ZOOLyO&%q41bKf+|UvuhDtfqF!eg?qP~)ughbc?m+tdPq?GmCu}C>c{qA5 zH~^=~{6|eZNsdFv_8b}bS*zCk2dOq{iP?I$AyEGIlvzzPhVnGZZA8lPq4@?wq@8HX 
zHC{GjBi?a7%nw84H+t@#`_`XdnJvEF=p0e5vl&_53>;J|f!Oj3uqOu_r_%*3K{YJ# zT2#ecBL+QpASz3-I%M4Vj?H<7QE-&QY}m>-dN-4}Mv!h-AVd5S$N#~Er^z7={MJRz zWL{8Bnc6;#SbjPr9@LGRr z92|dUD3=vK<_+0k&pYLAPstzW;w^I9LHK3EDDM)xgDryZ;)G6LO8aS$%-fr&SMY?d zTKX|Yv_7_}odlg6==?$J)OvU z7pDgd_ZIcU0f;dh!*3DIFyGYILuOP3g8QHwfyA%!2zQ*R z&p$2o&Gw&k9;k?CPG6zb9J1$vFKJH`ZD21^5y<^R-JK*|$dw1}$4#MUN6)8i)Mo_G zAoO_J>G-gfYla|yUQw^Wm0O4Wq9&@s`vYbY)h&QVM2o0+$gak3bvO+=qey0+T&JFr z8kDs9MZ*U&m~sI+Om%)Zl&UhycpKxKa-1Z<)`8hb5Ky2jvw{GKikl zWJOpBzMWiOTN1*6fSGF?YjUiOoLYg!!qJ0DY(mFaI@c{pY^e+*o4J1LTOGAMQ1+v;hb_I{>B))jz$4DD0Q zJOIGhc?=qav{8Qw6uvOYVmj(4c3YL^s}uHj3!(|Qd{$Sh=57aHZRcrZC=dSXuMwf- zYdt9W^J#GGQfjsENdH%I?CcP;#&6KTsi`pkF9o%+(h>%l6I1&csrK| zB;1N9Vp!=@j4_nfT@g=Vy1T)=##{>tMSs&Q^UjXrUUKb+^p63r@r=9o7jB~8!nCedt z49qVwu@X)p>u|f-$Tuh8_tWsiz`^UALI~-^DLJXjuF{3Rk$2j2HER4G(_Y8gZ(W_` zcdvtwo{w9VjwHX6#3_H@M9!#*OZ^>-C~3LC4{K~r_@W+NQ$U=7(HU9o|1mJLUE6ph z&1$j56Tr`jH4u)PU*mgeJW62#Zq3iEG53B0zkL}+Jz`-zH<}3I{|;oQ4cF2{uC>?3 zcULBl0*YvgrijGQy4i*(xqB~)%#rEs#rKx6tb_R1iNMmUGTwW&>xYode5IAL@$!Ni}bwVG*&T)bg_*Qf5SdKIv;&u zbA=jQ}Qx`(q^RMggX8ZqwKS|ZT{{WZ7*8f#+JXBRQ4F;fH3mY}@``C0kv0Ii$JYCt;q49eH-y)xB2 zt`E&Y=4UB2vu81Ms)y%)ANonxa*Nj~;HpUeRu!&_Y&0!z>~M)r_~CL01%2kUM&Jlj zb2&VV!cMJ_q?9kFQKV!JjeVLgVp11M&WNZvFumri-vJxqIspZ;y5 z3Ydn=YMs2ys3CBktkQU+pdAjrPD&}YfcE)|jrC6$1ue7nZxgN)yuC?X++-B`B=L@vmoJP<*f61w;#71ZGK|ulZ?-O(Z zPtS0~Y4PrGZ$g`kS2>Q%1p zDV&rl8pq4);1HfMT4R72kpU>(vS(d^M4Ih^ZZVh19B>#``0`xyK>|m~@4-0SLM?{2 zAf6nCW_A{L-{NXcM$a)UdC}@Odn(=6|IB!3v>3vLHsD-*5k{2mbhoO(T(w$A9PDDL(EifTzJUPqzyw4lu8j-?2)AX_`KRMRuwIB)BSKJH!|?%=1chr`zpy-;#;@SRXsT6ay4~0SXT) z2#dQb8VPmLq{dbCyj?cmAC4t-{0LPCmBo+Zr&0gWNB`Pwror%QH?!DoD6>D;f`6(p zStPyH{IWH(A=a+N)sz+2`P1P*_g(jdV8HI1ZH|Ds%@4T6Cxd>?fW{YiZViZ;({l89 z6-SIA;e-3_ExCplv}abMMhK>#r4OyLinPnL*yd;xB;Z`;OLA8NT>2n`MnAJX`ixMP za-*0(lkjTC-0Q&4289f=269tR2LNNYxRlB+-V|n~NL^I7w?4Q@>u)b)K<~MLDAsB`CW$5abNoTM~eoNE&ngK3L@}1%&d?WZ(Fb3}P z5P!=Jmt-$(cms!Bgu<32l}#6%P3-Bh!gxSsvKiSN`TT}#dtavK3R{^+)JR6Ic=Ez7 
zr}GcS$Yt#+0()2MRv?HULSWlesw$bx8IyqMr9$8QCaeYP_f9ZtoT}tQ>pvQL8uW1@ zBJ8BUN~eYtqU3Px;%)dA7PPnD$Ceg6FKGSTd|6w8CtKX7AHA8Y=J=M;pq;CLUygJO zKh5*oxTt@6HK^r&`!drymr!=4(27|yt|)WXFC6t$)SiY3Epq}?q=>cIoDR+tK@{ck?% z2M^ThFtP-4L$?)FzfzN;eU$+w!Lly`0OJdj zeBOH&ERv|H0^pC4JoRL%h@-E=C7*uM3Gfqo)q#ka{-~!WhL#JtijuRqj^DR@)reW) zBPkggm-7AwNUD~hf5#gW#~Jf6AwQ9jA~uwDOA5{-ClHW^WN`fo?4$qd%G@3V3j?AR zPYQ;$LuRyiP|Q7?ye$ErrSlV{VL(D`d@4!y8_Gk#prhXw?~lHkD~O3xSwjpRypQEb zwI}S-zlZ)j&)-NaSp@bTFsYU>;Wggx4v^?dMX#-u+Sn zi$bGsT67!WkFj2rRUoNSH;Ap>Fix+dvRrnS6Dp>A+mop)+To$qZ8|JD8fV(N6K6WC z#fCmyFo|p;m3!;!o(3ROg)i`PHk+M|<1Dtg3z4!z!Y=6Y572wx zEG~njWybB|K6%`gFQw&C+;dC&Zj`gcxE8=h357Mif3pT)cLAzC5)C>c9Rn z28}kx!~3rL1H}oTDIMix;y74DcDJd2 z5q-7ZlfdwC|HS26BB1!Q{nKdJBi3_%LesqttZ)zZ`JcuvN`~Fl1*`DyJ}&2P${A^dJXP71_FU>MVC#&cKUg)f0S} zRhUtVO3%WPg~>|Zn~LneYAY0> z)mO&PU5=x;*B=_H5g|J^L8XZEK=D3|m+|+&?MZa#4qp^<36A4w{FI?CTx`WJF;v!i zx6$&CQHHQ%513sA#Df zG6?VkJ`X@18OnV0(&R4$+xbx}h~5ysq!~Ax=^G7bMj{$q@kD4CK5xqIS%06V(H_X$ zvYx@VpI_rztQC;+wxM%Oz*?mihK}Y?$G9y+6^gOZfY+ggPMkm+_rP3h8^K>UGJ&Zd z!@c&qhw~zU#Y?|&zW6i;ns1O!9&I3nTUQ#FE@1M@tr-_AVN&?5NY$&c!le+V-JCYp z{1JjnK1V|~N6pk3rV#_lDAz)dvz}jt^E4F8M}@84{J=OWj3Z@*+^nmTZeYK>yI743 za5PVv{H1OB4slUP7~yeG8CrBeguio$adq06Gm}T%klq;d70gj%-&0z;B2lTR1mQaf zl^oL|So}oh&(6EGCWYgF(#YLWPKV|;7yk8eUEOL4F*B`6ru^O5WQC&U#Z!}3I{*G^ zA?3qAoshUHx@#Pj+(H3_GGV#>YBLNHYGfdQD+Jt*?fcY0&|R~*fGi!+W;&#f2O!@L zk~FMxbgl`GZDe6%NAI4sMNl4PH?}m#7Q;rj{3|J4KtH*Y>o-3Z>Uyw4EnKuV3Tkn9^~r$=?u?YkX{ z5|4S>fx(+rQ5`=thv>N3#y<&%SM=o%F&6f{ypy>2mH!C$Ty1K+ZoZ8E;wOg5Ez^5K zFwNs?dk4G>@2o3n{Z|AK$pQ?b_cbXAJj}68RUM>Om)Il4n!g6ZAoa&whVhP7Jfo_c z`8EVzpFSzF8*_<7l4H0CAM)rC_?a-7k~11Dvro$nW^TOoAa{@kWSYT0J5eeaa9(3@ z0TY4R+f=OA@|B=ifZ=dedzwGebza{F62x5{kg{95D5*RGrEH)k;N?&HN91OL$G#GU zK`~)}a^`SMi?0R_ZrDl+$A$w=;xY|L_dBSdc5+tgnT#HBR24&GXM>UEl^8E zC;+!BUXT*sa6?TFz&jgr@6D$^V3NYop6ozVQa=Y@Tu<5e2XZ5?iNtRp;nV)&NU(|K zQs8ZNSU=nPV89NCcAzO@=(Ddp&CT>db-pq0}RI-}yUW*pwPR8sH}x{Vdy@0CF#Od=~(iqw)q@(oi#KjEju zm8MLkS(U*ktXexIr>gDCDD1nwCd7uo<-y9Gfb82KgE#{zDmfw7?((4iNK1_7$C;LC 
z-(|emVWL0?No>Zf!na~Mg+Ry=Ua^z4wL9e?milPb_MbdkL@@c2v)Fcfv%~iI*ilQs zE$QicYn0x?k@^U(8AWf4|QdD$sW)WXj=)*yZ`Qds7EvRL>xZh-O<3&<6aeFD>-?$y*eC+w(Z8 zMq2hqO_JQ;c6HX1QAU;*-q*3V)_@AeJZ$Agf-$6hd0E~AmZBP-XTSa6I*(=Om%}c0 zvXY{T{$|3g29S6AR1i4ulU|IEn#cgJ%4F2440}5-frRL7w+p#l@jEfhpDZ7~6ugC* z%d*%n{78PUo{|`J9SPEChs3jUvmse+anp;rbtuSF2Yn+H@p4(5V)Y>75jB64K~BMH z%p(%x_4Sk_jh2G*5N|KEpH7U8VyS@xc{53&4LzUnr(AI!qc#WmTC$TBbF?A}U*r`- zGRm9rcvUI;FZ9$#R7nTGc6_C~Vg5A(Rd2mS5iQz&xWBGo*(KJ`2K@2Sy5KX&LJ(wIJ5b6TbNQG#sxIBKz z^4HZJ$hmt*v)axPS6fsD{N69bTmYLG`6Oc)zO&CcR{Yo^Dt)}iK|L~N^Oia9kyFtf zm2CVePFmbQs_~ft2H{f*b;2$y;vaMYLiZ33Qh21vnBaKF5Cl3#ds-) z!MWud&a-5=u#qq_GD4`WH5)Wts{1bJxQQ^HDH3sg?K-=#@LDX{2rr~>yuIUzK69&L zA9w4mtgMtZfDN3HsNy)-*nm;7NjI+#W?H+>+A3CeTMt`Cn^tK5AN=SKBhw%Lm<~X>2I?dl8JLYKa8J<`+s=R**@ydfhZX| z-^=o=i}~JTEzpX|u5}K!d@AdLR&D!~l>7!yx2-6!m-UsFSo*#m-7a>T3C(lRgnZ`- zH=RP!8O5vXd79D&N$B#*w;RI?-Eryy&L<>2aD`78MhI!jnH_klv5_CegypyH z*1mg(tJQPYLjl~Y47-rN;htJR=|OHJ*qkNv$L-{vUqQH%?%=X%pp-&Or|(-)x8z0s zOy=gfauZbi9S=}<7$!l6ql{V8du%K}YUXj_DsG!0plb=u&;W4{qr1p2o1|6Kk;c{z zjEDK(**^HsFiOP$we+xL79fB}3-8opZ3{yvnD1g*nB=lxd10elY;J z*#jr)Io|=ah&WCV;{6>P4zcWR<_nr?_)h>#BOJhQ8$=9f*2WMY+Ljg9z(yxMz@!bf z!3{1K-@eMxbwsR&1?b2lFxAco1u)x>FC5U0xMor?N1*$>B316d3YfG;SL*!7HD`^D zy%xv2hH2fYn;#Irt9~{^NCEGt=}KrezIEkYmXNQfR%s5gdgTC`ltjzoqAl**gxEnd zYn)2N=Dog}{mP^x)jKfeunJDZk}G%XU;IeTC0&V5ss}WQYUkPY!s`WEFfEppNwvHA zAcAqyJ%~@Y5ywQNPr#&;cGDE&Dn2G4C3cjiBA7!i?yYv2qir=-{Xf5)`8Fb90f(@8 zBVJjaHl}p`djUl7N|Q}|R_CLn$_hf;04djp=8&#Z_F0_KDft!1Hxv>BGfVQc?(SLQ zl%{z{cbD+uX3Fq8oTMEXsv0NRqKIzK_&A~aKW#^Mx4`;#Y&Pl)V@nu@fen*oA}`xX zXhY?(@wX!kO%V&oN_+S6FwX9L6Bi#}NNXu(jO!QU+%?pX<|o`erR?UOaL_);kE49g_^%IFMt?biHN*emm;)1-y>~qeS466(TpwFW$l@xXtwt%yDcjpyB%2gn ziaK2~?yH9{Y!-EN_&V;SQOrc;wo+vi}Ip$yjO#m^bMugM-wSb_jBZGTIcI-r38K78*phci=ezVmVLc*x)_Zu41cumg(WK zNWm^llR032M{TBzps=kdd37eU(4il}v+@DTLM7;wU}6XJ>!Zbz>RVO2lbi}A3d zM1&!)yr3FCwNgl$$!3}=(gxKA_7}vnl#rZmPZ)E9y97QiY?O;VwE~d@e8V)d`8jiH zkbnXyf&HR!YL`X;!-%*4pgb=h3OL0{L$+@No>=tzDw>bq`M;tYcABX_;ZAIH5Y^nh 
zBp0{%KfXIWI&!S6qR_EDO^v`mRs{Uw?Ff0AL3{}h< zcVc1RtNm=S4brVb7|<9}0n4aT+!p?rJ?0vJ%k5~M%Cn$kuH$I;9ka@)a$s#mz%j!0 zcAQ`Oj#`?rH0##kSSy>%Y|^Daif6Nv`FrH6z45 zSxKy(`eUNBfJh^GFwehg)vv;l-eNH7z*>Vyi1g=IC&tph-oxL2aVG&OW_%Ijtb9@J zvbD0(BkM9y5U1-ue~SbIq+LPGri%JZf@%FXu~uDd!us9Yp}^CKDRiS41tkLtPU$E% z+dzJC#(g`LID~;GT|@;;EuOk}@jmJU z&i?(jjcc&du2b$<-UdL8QdGbJ!<%w>xYD0If8dLNy|RA9r5R?Q3u>4NUVUnCB;#Q- z_uwwhHAi>$w&Zn|`aXnL=o6k&d)fPKC^k}FgE&*(mEseej3iBun+6>Y7ETEj=5g_( zdesgOMdjKM{|dauApgu@mPkMu|FrYv+ssA3yzEpXI5dV>?NfBDlsOW$^m}S)YfY2D5W;^oZYt6F7gmZxBV)uhkhK9+uj_S|F(Uh05dj;gM{}I2IntTV zuM)THBH$5mB-0c}Y^Bl?l5vype)vcd#Yhl$yLIcp*$L&}&sc{xg)I!#=%Y2)7a>PT z7~(`MMI`EQ5hVWOdS?m;6Uf8tuWG?R? zcC}(8|M`c4Tgcy=VAPIh8Nz~2U4y`iYeAW?Y-lMg;{3TsY_ZJ?CxtC71QQm#xd+2x zk{1E)ALUuZpIS~y1yccH$hml8;G_xTFna|;HzA5WKC&sH^yXy}o4%kAak1J{^nT>k z(1c=NV_I)~LJI+xIm^6XcWEj4$C<7lxUg4>o(;GA`+G-scT)S|0sLc^3%{8{C9*9U z8bQZ5@01~d@V?e&xDwb43;}`^PFA z8lNrN%aA+hd3lU^z!G(I*VPAwzX)yKh^z)YBirYh&s3&Vu;M+ z8~Q4`5gCa_DXoIM@@;FG`;aMj<9gQlu4?W6>R0HAl8DT+^{uP0*0}ZC+Ug6Ei=>Qp z$MtKT!;%h$s1Ku34cak4mt_`s#Gx_W7j{ZD4p81>5I61!6JFnc>Rwko1H5ptL~R9b zmHY?kukrlBSM)$>+V=aSIU0q@F|!9CvkNH;96;Lbxzm9I;&oxuR-=xY)9k3_?Zf0j zy5^peM z%Gb?(TskT(>9^eyqak+UU~XWUI zx!*S(1_)FbZblW>8*tghkOzmCnE)~e3nctvZsr1i>v`O@xp88_hW0u68($G$&i7bJ#>|v$dWz zsQj`2y5|WD7rFW{M{IFwCS_20VjfI7Bx#i=kD`9Hb=E8@P-Bk*xYmd8r5kQc)^t#H zPlP4vy(squ9MHj}|EYVSyQVdfbT1x*anJ4NX`y;YrXU-;7Tz;F7^&FgZ&`U)`}t4K z#5~OJ_-vMmD|H*C_Qnk+jby2X(TXPdn!@=<`q0t_wD-$xf=>@07{y$5<|I?Q4)aXX9Gyd= zLa+fR8HyZS=1aoT8`Fs1+PJ(YC_Fy$kQI^czCT!L!pp8=kKLd0k$qM47#6F+m1rKxFYT7I;!^2{(Kv!f2oCnw^ye6FMcF> zYIc&&iwi@FcR2SfObUaE@^A zxCCk*zJMO-h4L zbA(?W1>u_)ul+w}qv7qMna}ky&)eeUgi3C>eMXE^4PWX(!Tnekm3ihD;xLXEi~b`% zwC+(HAP_EKwraLnRdXkP@4oO{#Lw>?4J>S#wH@Gj-kkb{ENYB;zdW$hhzCTWt7UCI zz+INI_mWy(bY)7|>YCb-bx*E8b_(-4n3W`S!uva7avk2P!1>wjj4z+l9D!uu+NF~B zV?=J?r4u;&P0|es;*vH~UDDS|Q@UEp9Azm_)=0<<>^=5$Tr<^(2W45O?bWtJR-`mi zLE%z}F+$XC#Fthv3X1F=Bw*KBMzlfG*NpJA@xr1!;7`|8fg|$1x%?O1*uV~fLmPDi 
zjljA*SK`0#0+3={roWZ24NLpk{R|vvuA1@IvC+^lq&Mz`4#qpB_FNdC+1{!f7%CqP zqp)#ujuYc|q-FLOF1d=iXfUW18%Vw{cHt;K(vW;V!N5H^29|#-4HNZRwm#jfdxXp= zkI0lAyceRX!Vd%m_x-)}XDN+INv*4Ejg?e<7gYqVhwV2;65V9nKkO8YBxK;nmPE2j z)h6af#wHMxk)VB579`xJPmri&6aG;IO`tbhOlJ;sa%?hpWyAwI{&nYn)`CA=jEN8Q%1ZwK|h9N;lxq&wy=_%@J6#7DCF$6Ob zGi0jm`0HjqCra#PEA0KGqLntn{qRti^C%GVFY#A!@U;v)JIZnLM}v9^&lfO%n{514 zc$7v=%`rcwkpIXoA}tsT246bX9K?`v!`ot@i(J2I$S)kjzLmZXJB!)T%UBGdKb zQuuh?EpL}p7fmzmEnQRf;$Ww6F8VEe6_CsCz7Dmfu{O9cj74N91IhQ`ab~Tv&07T_XrvNueV8Px;#VBIXAXx7FhkC(NDWxlCK34-F5e4^&P|y zU6?NRyw7nxA80%oBKm2wQvwh|Ofg9XvL+s$6oqd|9^$3ARku;kUOgjH9SpZFuJ&3U z@5rmfJ!9J1@UzTW*Ctgf}X(XaSL6j}&h@G-0* zGDk}@Y0C~Op%Z1Dq~eKKfnz)t6&c)EkNj+dyIR;aPWSr0?H-i=Pv&^;_Mn(brDBtX zq=Usy+xPwkB6#=Ej*GfAO4a`)=2Ks%+7TGu?0lMs_y0&`T5G2MxW zVS?ChOcS(1LmP5j98n4%JQP+Y>w#u+xh`L#8H<{p3Xf)q0!H4o8UJPUV8eCFG2I#&e3{IlmBxleG_j2xPg4N5t1=T3~GvYU$ChpZo%{GK&-{3O_%n#=44I%ULgDf8evhtTeAQ zna)G5&~t+Zl4j=27m&FAw!|KMC$_l)zZkH5(A)YG%QekSuK8W%)GpZE@d3q2$AFtYBTH}lM;&f3ncT?JBfpEoi!QSrb!V>f%qW(&Ao=F=hYa8Ny)#K#M7Tl$vn;PHLA@%09*v9Z{-`Kqrn9122l=yFD-9(#b!x$;}_nl#@EgYy-Ut zoBHuD{g->eO@o#BX^R{*kr;Tz4Ac@w0zb_e1K%M|0BIZr+K|4|eb(~N;iN}p({2F^ z%{542oj(czUgWng0{)Wjegyv>2Sroj>D z`_L!iBb}c+xj%||U}2tm8LDl90vhnfSsFYni>6N;@MZ0 z=z`au(+-fVv3Xk6%tGr0;aJ4+tb!1)IlNf><9mZW7PU_nKGfD7XjEP);r2aIfzLoA zDK_ben0TtTw!M{0zD#=bpad0O3hI}{pxZF?Xs9AIs^1@*g`qjsj~>^MsmOH3%&y8SVUfN5Ptsk%_Xo7|rO(fy2h16B;%7M^Mxi7s@BEW)c}PBax^ z)v}&RwxY?@3uYtKNYufnc0+ZVGKq}Ef)5OQ_#V{V-6qIAzyWbizX?8RfjfOEA zkKTZlS`Fb|kSeac>}Oz9kQ0(M&u5L~q|gr(=|lu^6(txu@A@Jih6@WuOBB8hOnH%z zO+9M_F4vg~X9NC=tg{M=vyHke?i#G|;1Jv$LU6YP2~OiKjRpvAjU+&T;0_7exVr@p z9^BpCnfIHjshaucioWbBF8Vy@?7dcjoDAIw2BoqMvsO>uHwrQqQ|{tXs{vSvBn6|i z4l^PcWdPD4L;sf5kWouh`D6pM#lsetGnZulM*eRb1)Xpt)LdH=FERLOo^YLZEHav8 zzmT9i|1D;`3`9?ewxm!m;#JtUxRbhVPTM|-;n<3K{>8CK&*P|&Au;F zJ{PO3mKD!(gCbx26z>#Inw4pW>|EPhI;wjh6dzN(Szw=@fd?(a^2Ob<%FTI;x=jSB zoc@8X=T}}4c``@4PSO6QMJ@rZC$_f=NDKclV{3|0oNMZL6+VfqC-iboT0a?egV#W2 z4(QDR=s>Zi=LEkvxJzjm_yGvF;QmVGz~}?n^v54LwL++}SO2~Z?630fWw9&BIm!|{ 
zP48@SAlj~w?7x(|p>Mp)=!In9ZgJ_thclRt*k(y0_jiA1Lmj~jTkpLM zjiHPMBxL4O{WEtvhvGt>TTC*cnX&?H{kW&A_}MK(ca;R3ChanomJA6A2{ITffFT2X zzR`z{?@m)PlS?S z6$-l;7I@-Rf}?1~039@l)Πt}IpHje+h+SF~_G%%%HnS?WRMb-EbfWz_nAv2$)bn6)S9%}E~LQkq@opHgj z0=4n869=mppMN{%NNF9fzOk!S|6t0s!rY z)v9b}h!#daOCMua7%h<^rk8eBX5ttwluGBzTGtlC*wgrAFBw%Qs`YtmofnCLLPl_V zJC%b&xvUP4fDRUIQ`48OmE>V`kp2KRml&Rq4;*!|O zeL9eOQwlytQ)LB$OM247BNPusJ6w$Wpgn1$a>=WYE#1Zqlz!|YZwFx{{kW6;dHYm% z_brw{Ee&^l_m=kRiRwGg{bbZ=BLpJOL8%|YT5y&~jud05k-`3m-J~f0J;n1)6Fo{F zDZ2c(ECnTz;H0}%Jo0m1j;t4y4!;JX7D4vT>VI_HJ_Q7F*aCPt-g4i#XGdK71Pun( zw$w54U9ITM$7@(q`{0tNl$?pKqc=GnWH$Wb0DO=n?0c=$ z2-FJC&xwh4d@{hskgz3+l#G|Fy-FS@#x^Zwsu@3HHtpd^1*h;hbW%VF*@He72sCw@ zsW{I6T)tbM&;@lU{x%!w^!{5BBIId7LOXUUch%=#yV@2h2E&G#aLCMPh_*T|Vtf^F z3>Hj#*Z4Xk@Sg(1FPe3O0Kobe6Pc#coHKPc!qX3Tol{dI3GL|Q;vxVM*E>44WJK`8 zicN-F8hq7;pzxzAxx$CSKkKHb3O3j~yYEWRn3WiwBFcU9x>am~!AGnV(= zwuYL$PWmV#M+K^=#RXPEhN+pM8ySRJJlDehd~-aiiIYmuhaPlhbB(kVdWuHF9^hBc zFe0^YO)mi{jzJf;#SDOISxTBr6b`p)v<3K;3bMx(V~IOc;aNh=EArFQIr1HuCSSBP z+sreaw2OXcoRJcZ zk!GT@JDQUb)Tq$Kj=F(Ns>y6M;9^WWbV;E3JteT3;->}+rM04(m=cI%sySgpqjz!8 z^gR{bm?S$3!Y{?|Kygk=$-DrrGs<;cLGf*0#MA}Q+Ulc|;6MPpu930mvJ}Ocj8U*^ z7aEja7%)=B<#A76q}Fgb1h&pIZ~bYka7IWVNOaj(;tO~>rp2K`ABf-GI6rI>GK^D_^DDqY&&5Z;aX;Re zKC{tm5ZW0cvNdMT)CDbbn(rZ^Fg&$ zm}qzz;aN1bp4USoLA~hjDS`&M7UX5<>$T5^ksou=O3Vj6(F9)nE#$YJ_U0I$FUh zk;`4AzoHcK-G>SUpuKRy5!$NO3Palhyy(IZ@oH^z4Vg-P(vI+}wC(CmA(}U&im*X7 zbvYfYcW!PxdoS-guk9HcUCTctuL(`7xGC=C4<>>F zT7UiSgamRg8D`>v-b<$Lj`Z)0riYPV0|D(wX#TCd=J!FFoU+(Q6Q(=rp6%`JZ+|R= zJdaXcKDn{aY39oTfe1Ar7Rk!}ho(j+D6o8j8yM2)f301U6ez2xAU=)%$!py`J^itu zSZC=qQ`D>f`SEr;|9_&1yfA!Qhl*Af!&SP?x1>ILT}QUE_Y-u8YNck&W47ITkJtgJ!et*-Tcvw!$yX7?!C z`m*+9RcPi~^yz$;y!Fc~1|U)G4Jl5jY23L%{J7G{`QBCYSalMcaBf+~SeDXF>9>ZV z578%G=e)cZfdn`A^T$(3)7yuz`6?GNolz{pPVay!IxF0Ue#lo=l?zzsmYHWNH4jpH z%6J!mW~d5S<~f?LCYGdHia*75=(+h(Q)O$6CNM@Cd#uc4e!67ZE0nh38c~k`l02Cv zh8ZW%OFF)tRrTmO_;aEsHjD0#z|J)~0LSYW!__l~$NPlEov3XV&o!|0S0^vvobM}Q z4Cy{r_Wm$=AZ+!j5G0szNPUP|A~u@cRZqo;%}+=5WEs&_5>AcIe(i89( 
znw=D`$Py%L(NXK^!oGw#ZxbHr3*ao0Aem2i;fNTrk=J=(J2|1X+G7{kr@m%!;SKIV z0CJwk#AN#fn^z)$w`=2ipB3KhoVcUmxbQ}9bP-d}VZNH~V_I$C4TqmN^>0QN@ z$U+2wrttO&uuR_C(W43VRf=W4n7@jnAj)afoVH|Akpum_#9Zv=wE6s$pL8OBQ|mv= z?frI!=f^|^{o{prhP9PqilH={e`_E&Ts~UcFvs@7MQv;p?Oy3qoE8*-c?4I~(ebez z1EjSXc0yTIKKu)V2Jo5EX&LMK-NEGdy3+GrsyrjqbC&ywLwHljRC$_lu;{| z1%oLCD@jXaBFCxB#`ZBSrN>f}Dgp=yX9v&Mq~iye^K(!e1x*mx1J9}^xZTtrOBQ-PH((-`733 ze&@V#8h{q?%nk_qV$o2hNp2?QGttDG(YaBHbKI&%0XJCJ+~WDm{(rRTw}wF7&lPN1 z7TBN1nZpymd5#s$(DoWKrOFrHXY(zvZ|Q>X`t`d+#NZK|m0sE1WS%t9z~d_h2rB-k z!}K10mD){^Kg6{{dxSsQ1eSk?t*j(Dnsax4Y2A2x2Jf`o9<|s|f!W|_X`2OCa>Q1_ zHuQiNb2$}O3iOnu^%w0}N0tRkbYiKDbnp}YseKpp}1UZ?2;HBjO2vt|v>?l@_2RL99DSVEbca zs2`|wQ1eCK&E6VHxFigT^UFf`Hsz|2Bc}DfZ*oaS3xUPBAdzDv!A@jXGd-PD^)Mjx zU};<_k)l`e94n{?j}uc~NsI4|&1W~%2pVA;ieL1e>edClG#rc+e`{lMT@6*3-)zw9 zWza+M0)>Nm@qU`Qwzg&yk^~(U=n*pZqIsTu>HB~ALb-F0OS z_LI(R=p$s&H7NYIqu~aQC|;(G*rbgu^0qi|pfYb**lAJpHP3PO&o5`6(96HVAop=NqwW>D)ATR|Zs9fv@2 zg}s=Tw)VH=WF@fnw=qXIo&*~ss>JWfp;s0MOv8Ht-%fg#_?nCN_*uaN-p0WS48}5* z22O8`rU*rfuLvzH>GSJ9pS)}Tqw!;kdTu>SgI$%EZwJ6RdjpIrEe-W0>q83enJ-xoSryyB`AoCeq5&H-<+}Ho+t83dbZ@xaRooWOa zDJd)O*Km<_Jl~MlJFmw5tFea8jraG52lU=PBc;OT9sj&l{V%OV_OxP3B0hJub9KVO z!NDb1{Pml97TeO}{}W6C2mp`BuVY`IZ`iZk7WQ{DJRkU}TKz&ED_1roiZcVQfg{=m zS>M-<+0qVRdv}OU76?NiX3ePz?2Fz9MoI}a_-N`$KV-HY>q|e1S1L4KE?`dTuP_gL z`L_m}^nAS$6i9CCi@>csJ9)-z_3hbx#%iFFkUGu;O8c(3eI5-O8rtzk7i1PGpXe&{ zh;8o~6cmQ#bd0&9ZAB{cFi61NXEQI@^VJX@9kt7W4Oc{%OBs+u^6V}9ev?YD757gn zE0t0VZP*%Vv-9R!sLD%JhutLbnw4a;RNUbFctv%-%~t?ui$TkQozd4B6~7u6v5m}; zvjS;y_?T>n9dtU^Q>fNLsD6|(cqUO?7USR_u<#Mkqn?TYnHuLHCj*Sbr?&>{#987) z6g6@naCIedaD=EILd>&uStq?nJ!udMt>blXt_Tp`R-EjkH_=wP;vGosVg&8~iid^7 zaPfDNUGB9K$K7@oh2^_$4SNV(A6ro@NKkj2ro|JW5^p|r5j9?HqEP7ZqH80Tb9IfQ zY3~VHq3HhwK-X$jlvXvH^Wzva(kfLp!6cp)S&^S=P{9u_1Z>$yfzi z$_=%G3Ky~ooFX!Iv%)!|kW4f({Iwwjrzb3zN@_V z{6xLsLsx`UEttA7-7Bb0&0g;iXk3rgj*fBRcQz7X@d4pfgOVHPumJzhxwyX!Zi#JH zn^-kpZtKy9e|1!DD!K5 zO><+3lZDATb=w557HdP4YJmJxL70L(0PL93$;@48<$vI}#YtqOT%XZGUOBGg$}ysb 
z*#0ASG-!|-P|N&VC$D~WT#1562c8!d>7H7|T#;A-ZHN+v_@jW=Ox2O@uzQ1}@~iCR z(E1-9JOBP8+8J}=^0*0%`$6yk(#uPqg@ep;$)c+A*3NOuGa<4<1gd3q2}E;muwv>^ za}wK;o7463$Kl9J(ifr4y`D}SE*CE0AyDiLqq4Pbx4N*JbUFu0O0Ld+s)yN|3BHD~ z=3o5ND0&}82eHT=80ircaQN!@aw`8)j>%EtZ7mVhgp-wOk=9P=l0*wFXY>jW>?j@NCi>^OsoOUYNHWbLer& zJsXXei{HF$nG!0FWV#C-yUhT0+i0Y4o7A)*j%Ymq73xuU%<=GygiLdH8?|Dd>w<_+ zBVuTH5G~TE2OjZGHdOfnFwHru3O>$aWe{}=J#8^AFxuz4aAMl{*Bel4VmwoohyF`fLkyg<)sa!1GWR>D1JjYRV zfAr3(3h;p;su>yek6EBpH?c4eDShBZoL3>e-~or(rwk}n0I$r1(4OQXQqZo8oh{ig zS9DO#$0?zp`aaw8f8wl7PVbz!p>`Ya8L|$3k|C zr#vE~aO5F`h%0FH3^LKlK`=IfkBzas+BObKeAl)PADQ!L^>y)-!5|Q?i&|W~jN6AK zP$DxFPi+(FCk1G#`l`U@0WVfsGY_kODU07^TQ#@UkT`Dp2qit$OXgZonPn* z(a=71COWRKuRbyJE(jsIR7BLEEH@u^$zU2j=cS^=yPdhq$To_&+B49@8q~&xq}VWb zPB^JTw?w6LV??9b82?H{mXU^64Cl}KUj?jUa9mC@p}fB*?pLJVbu{~e$VdelSy{Sw zB60=gqkqO|EooU<5%quW4=Sk3epcc!TKu>*>1Uh_qUP+>a&Vw{^Ct0IMw*YU&N~0{7+G1KsCd#?W zABrw|3J85zv8PkdMS=adNPNj!ngF>mF|&ahP=n>|JoLq%@UXSV?YilWnBJjZ0~eqq zS^qz-Pu$4-#o6t^2q9@Cmkk`w0@fs}*KRHFm(z)4e>G~-6=oCXq-OT!mG?@A*JDA# zSW}lf5)kzYGK;|0;yIfnT>71cX)nsAY(ZrOY7gAllGk;t&ttmzXJ@vio-RWMFpG9%i#np_0Lc(taf! zHKl!cV3%(V7%w=_$fSEan8Kkzj9SQbBlL;Dko5IONQc4ca!W*a_Xim{xs8cj>2C2c zCuN}9re|P)DR@z&n!aWKbl7Fd1L_BQ{wfmkzyMT9`V_GC$VWp1ue;aM`1`j~yUhO# zWjCy?-d3)*^(3ZLo}X=#i+T3}SjP2}!&~|XuY=N#bpdH}8VwU1sN(H5UTPs+ABJ98 z>Z%_0*oIdp-DC~!WR1^Q42!s+Gh4tAM`89$y2hqo)+@8bD$e0%{mWKa;i<{`v1oYT z__<8y#(xeCI7`8kyqp^m9wpZAIv>LemOd2h5YyCD5pw_e(+Hx9j%)-CYH8v9VD8CX z?dXZac87VKdzYmhL?DC3xh3bZYve@Cb`-4HGS)&oE}2Z1^H}Da?Idvg9~lCkPV61@ zJAlI!XJ5*qfaz<0;e0#fT|IkRWE*SI=3^|>6|#(A;Ul`PUwoql{PZ4w6YVA%n2YPl zD$K76DB>iJ^XOVy7f9W=4*%KCs7GNDUkAVg`sg`FC>>*k=~TIwO;8j4K5|=yh9o8U zMj?PIdqe-8q*B>18}@5QkecVkOZ4N;4D^~=fkb=JdY1XQhon)bp2}f16ryRFFz*<| zdQOTU0+Y(_ux!E2B`rBMX?2&!CpiiuNchi1kb_f zO$@ejKlr*Ek1cH{BC4mH%PofyVd}LfmlS+hFRtkmHt~scJegHc`_^X)kW) z97htpgWB*yClYVb-{44P6YmADFsP~^$i5*YNRCy=z_SZ)x80@T0Ow1h8tAA%>2aou zi-hP-=%(-eAyt*211{^bP8F{cChAli*PJ~r;%GhBY@tpuBHu>?=|W&Bw72r+Sd>q? 
z)DyT~sS@xx_jTT3*4i&3Ws2}g?W(Cx)Vcp>?7;MTmPmEThyI)OMdQ}!`nHm}K2uH? zetj6HU$mgNVG(V%V8`XON@~Z>VB;{|idgp9BTNd4vo}?MJe^(}ZUi+3Eg56pP)<)2 zdoV#9=BWH1Z$gSHXlk)WbL_1`;BPppAeVu4#Y_&3$y^Oi99143-4nT4sk}6JmkJ?m zhGrI)*hU7Zd-F|s9lykz5A}1P$t)svseoVzHA|pc%8NksiqGRcCod|5w_ZvYn2Gpn zKUn+oLZ&GLolufBYN|qVGwF_3sb#JO|98`8A%Ds}?qPbjNxt6QmqxJszb~`?`=XM;DxbmKTljQZK z%3ifnUe{5(*Htyj{0uuiR6xdvSa3E$W>4WwULn z-t*qVV9z)dBg-e5d zP`yAS)RII{+B1M)!{?=mJyN)iN|R}h=4jq7G}|zK&wWwGaYGNKqgJBf=v}hWKe|8{ zzH$PutSP;kL~qs&jx%q&FmJKr*AZe70c3isv(mu+bUYtBC|o~~DkpZ_+IIvVU4bwc z=1XBTT^{yg?jw;ndYHja1L8SR9ML+0)z@0wO~C*Nh;yZnKjy!*X55P02H%`0-s>pO z^qug!|Kcom=iQ1Vc1+j`CO60luK&LynBIF!vRi!OS1A*@uhE?JVfl9ia3eXX*Y{6f zbJWeaRsHf_U&9dIu=ZBnh2%-_W^bZa=&}h%n@prUyW3HBc!zz7r`o(}Yiab{B>OFN z4e_A?R2refKR$6B5L@RcbWQ>lFT$LPYOI;UHwwuzU??C?P2lXs*;LX3_=^XnP}@w0 z>120DaQHB3NsfmQHos`7d((~!W6vr*{5SGI?%NuRwX12d>;WZ~-`W}~V4vrW40>U% z3}jJ%)*@GzSUg%&lU^X zq{br(P3*ltyZ=GrmL6j6sbvN5dX#f5slC<_olN;mB%*?# z_>SE|AwznO724%RVT9-@H4uHNTI{a64w?F+sgUnq@Ylz>7`CYwUWR3B?4Z&Z*jv_n z$4uWV`+=CfFntF+yl$UBMz_>Yy9}`?`PLmaf&)hBV~P5UKz8Zb7{^`73B{JL_S652 z;0QQdypwpi)Z&QhVw5-NG!yXHy{LvY667Syy1{4L&bnMa1GeyCMBnZz`Mmoy*;c-g zneFb2^)x2Cg3K#IY1fsIcs_uU*~XgRKp`g)%C^Ps3cv7Sc`q(1Hb}IjIEf!@+qeMN z+!+A@Wk_nOo{aneZ8RYzc?2kdEvQ0KhtbWwT2X&+NQtCVNc;UJpG83-ZRS`=X3sg# z&%i#{YVSc{(D{~u@zd~}#a7`hLtzmOyEtOh6{0pVbv%8u9J(t^Vcoayifrrevq2JR zY;~DUlJ%4actC7TG5O_IRv)oIc{wf!lgS04Z1L{kASskTv3X&n<0^ zKJ9kAA|mGRO*u}rObbiDBhKRCD%xklb>7&|PtWuJ0H6X_TjCZWmB!_)^2lx3q=dVF zAM?2cNf{vHDMXBaAH_meP-PL2pwr_QgcEt;Re79&KnQ8_JQAD>c61@Z)H7r+e?{j? 
z=ClqRexK5>09;%v5kQys03;$|o;nsh!4@%k>$BJzw+DQ)_qGXeeO`J?yZ2b%^)Ecg zZ~Hv=^m<`4>$BGwNyL_KStKY@I5Il_lcT+~EmnDs@{DAPQ%dEBg9lBW=NBg9i5(rk zS6{o^jNp9ota!2Bygr;ODKDIRSrI=+)f~F}N{-Q)o}LbkL}Q`r0^&s5%CB{6gxe+o z_Y6g9nbAOB-*m3RNGRZ`5zZ|G<8GAvLAUY$RjK}O{n%?tNVZ{z?<-Zjaa&)A?y1Wu z{~Ytn+e+=Sz&@M6J}-rC{UtOwcawFXIk)qb1JLEM1)LfGHh4OS?^wsZ=RJ<^@&_iK zs2ZCK5VzXX?QL+1U$YY!R&(e46(Bx_csZM_U8nDQzg!@nhzA5Wi{1i~cpQvFr?-VA zSTNFmk;=m#AW9+@`@)gR>H|@iOlMP^t7|DetLwdgdW3uq1pO|zi5(1i;O(uj2Zn5W zsvy<4#BtQ2O;>^W5Bjm^&UV*Rfx;oqc^8X#KU-IoG?-ztwF;1? z+KIzVYY~%UT)T(i<;YCu9@`GV?lxu?SM6JmbZ#w6_4&KC|LwfnX z`j3z~#rQ}G00*o$oBr2$VyS!z)O5@Icw4{1@4Gzb1tj52DnQa?@z1OS>${Fqt_SIs zjaQc_q0}pZsjGstyjP-I3>okD&c%Tx7gA9SSD(;VIu$CXXxr&*-i0@%#n|0hKM1NU zHMxjY&w87iUgoPU2zTPW3hZ~o9hDJaPB-t_$W8-=rDo|x*+8dZh1$DaZ?SrQ2~ajwC7R=>uyF)D#N+4L>zZXPbxdL)nCC!?p#M}Fey19k3 zX{NTUAij1uFWtFLGwu7?E4WnOpfSCn*rPeh|IE+6t-jk#Y+9DQ-RYf6-3i*4OPp>a zhmt>?jlAJPw;`pUyyi)@R~3!mHfe6!xF?!f#n@Xq$4V$w>I}M78i+=)7rHb`DX;h! zx;0wFA^eL{ZC~xLPX4~3To9Bl6;NfUbBs;s55pI7KtUEq-rmfp$qw1mZcdpw(xmoQqS=Gr3c{646`x@CVdbLL=rL)U9Gp*%t z{o$h`-KFJZts;l-i;Zw04pTVdfDLQ~&K;yDDm5^KEK+|m;eHkv7eZc3VVLW9ia{yL zw70O((;GxnLEwkl(%mzI^MHtwll`6LA@y^9L=<-OoY~)*0=&rV4QOo*iJXLKzG=Q| zk2(JDD$?OW8T1ah9@eK71V;k2} zQ_7umR5`2M#{BrO>%&~1(B-iCdV*?{P~=FX$+yfi@@j&w@}862TZMx`t_Q9FfgTHL zIe_nRj~Dk+kNU-nqbuQFDwXgk8b2gGmcMnKSX9r{#%_2@T*m$g_@`U^w89W zQDg_u&L_FmFbN*3R&KF10V35_(7;|EPs&8^SM-!$U!$bBTQcrJ^OS6;y-fcsRThiv zy9O8oZ)`oOl_ye(x^hzODH6@clmgRk|L}ij;67WHTU=zTU;%3A1D;r;z~M_8p+8nj zi}T+lyBgJE$XSgje*a^hz|N5qy?+CSM~~rIJJ8aPF?p>4#v8C&(-Im;`CB6ls6K0J zB)&Xh5!dXsY*oHJTTRT%f_-=eJY!3*9VD?d+@jI11@7hwPdA7y(fVJ34sM0!6EVQu z?{k0H*zF*0dEDc6T5691WEYzH`t({#Da00NTn0mD2_NOAe)I`4GK$Y={p;6NyuJlJ(N#%My2%s^Ruvwsu?%Ovd zbIt8p0+FR0f-VogrZ_p<I`o->EW)&MS#~0XW_(#j8f9E*ai$s+(gXO}k-fbZy-f6UYRQoxeFXDO2JU&) zaRq4|t8f<*YprZE*aur%76mmm86_p2I>iDDdWPb+SyHZA?=}=*iYx^dxp#zQ*=gYg zNqqE}h3|=(^6kmuKQ=S|I)^ML07Ocvpp6)`q^eRwp^;4umD9iEkPc>3n1Z-}u^d7_ zKwxUPs0E;-$ZUWzPN)SxQYU?up>p1XJy2tKH1I7)nuMUaOJLp&zWUudf0B8N}At*9Qj&@wo2$pd=Uk 
zN#`c~cV9A0N~y$s`JT9?T0D<)N=q>Tq9YP2D)|5Kj535nAP*6C{{C&%IQ9SEu(SCp zbQg6!d`o?H_Ft*~TBmAT-tP6H@TZoob$ezu2rwt>6f zB=VeiSF=WX#F$LccO>z0w!3`cPX=-^P{GmhCc`=jOzc$K=G zAF&wXw1u8_{EMYLBsLvL`#0ZcLoo~Fef-IH)^DBbH;%@?;QP3U4#_>NeVQA?=8xeN zwJ{wA88~EYM=-Q@C<6InXDpp@tlptNAuyX5f*sd`-S4xhqLUbx@&=}!IaEb=C6P(f zKsdVF{+z~}{hmldo5ai^htBGb+-!vz6F5CZ-EU^#iRod|iSf%~Z$?RkgCzA3?TcF0 zA7cqz_6M#T?}pX<8a$}ljYZ)j>Q#GwIVaO@3^LMZNd^{7P~&;+8yHH!P)(e(V`a6) zxO_s98IjY>7hJSQ0O!`hJ|fI(#+zbEgoKqx$$#Rk^*pHI< zbj95zPt7YoZ@wDZ##(?PXRcx)T9Nl#d*>PS%YEo`I#G3s)S;*S2S>UewUGW452t^fyys!3|_j`p_$)T+rPMtu&D?)Fx|bG&sxv zrFTKuLmkzLTq6R=AwB$NeYv7zd(OTUm-cZCtVkr)(kW*Bz@J;Djk;ey9h_DuY=t$^ zV?m&brK209dKxfsI#2Y#F4|rHNjnL8 z9dl%qYki8&ZlG?HFzp~dvD%O9U!E~BK8bIaj5ARWi};>NIpSDuYGsDPj!1VkDIg>G z48P}K8W-nFyayCE(jPus*%aGGdoC31jo22)mgm|zp(>N;di#9o9P6Swc$K_aL1_KW zW-u2pcC3Xl*Vxk1!t@P~{=dJ-XeSk6gifwr4CpkW%rxjH8gidBFP_08b%LRGzkj!= z-wMC_pHR)@?B1s!S3{NUbvH~!Wo*uy= z>EaoNW$Q=g$ku`e^TPkSM;1_gt&%7BxFd>wAy1F818E`@Y&z+^gTwA=4ZyiVcC}P$ z>g@$)%b3NE@KWt5+{6s!5fF66jjm~zQJm#A92JW$tsNzvHf%`Eb>*(}A!YgoU=CB% zPP^MFS*!fkg;YSK;V;XS6I0`O{)iB6SS9l82wLY$9iq!iCxkxJh{Yk76oiRsH`I%X z6-!F2os|;kdeP_6oh)*m@v%77o|q|1U*-||9pZ$pHO5IY1P};G7HGNWm+m!re_HD+i55hs>t8Ot`j`Z`9*b0e)v~IH|h&^U88{sd*J!; z0(rNueg%%z7u$;M;dd(+8xBcROGJfDrY%-k*)=it_#}*fKYOeGqt(wHIZM4nq2y#$ zdV{g0Rl#aF_|7tX+(9stHmPqZIb|!lE}3W>O~df>dv_9xz~d6=o?n0ttsAfYcR+(N zDa*b|UQ6smn#9YOoHW(fsIT>wiYa^nEAo)FGB2vU@USBP#fpZkROzfL7lD&{c71n_ z^E~|Z{QQ7>`Rh-OQy#8%wYj;e03#3Xx;fX_lTY(%HPMaq+MQRWKDD0VYktm8dfXaC zmx6bGG$H5o&A1hsDz}P@4w#uqppe>+73vv*86)2CoupzUTm5u`tZF51r?kP}Ge8kb zM;}F!Lmfo=J*Q=T3{8GMOoBTn1lbv=eob?V3lj^IqEB-g!-4BFtz@}brQHX!iao z_o)30>4u^`c#9Q=0J4@J$mbFv>T}%(RQ07$_f4&Dq$x2;^GJXu_Pdn16BtA>(2qf6 zs$@qt$y5C;>nAj(%y3~$PRkDE%{_JKuYHBwuXsg8#n6$VF*!9SoC{Jy6LuMz9`Mjo ztHajLWD|wczMmA;#l^+Xb$o%SFsECBFJ0m5 zEaSyl3-0GPQChtxWqeR2pU*PygxPUl#LW3!K7Ar;vvSka)}{*xf34}7v#_#?ei$4j z8ZI-p;^$qYLq$eLPZjfjfS6^rTI2e|WebPY*77Yix=J&Hi8{|;skqVfHX3tz2J+84oKlUqO#(pP6c=dwFS 
z`A<`p`2&8mMkp;|F4}(|U#GfOKl|P8kce68Jv~=`(0p2p@FJc*g$FBXLhrmf?>+*KRxk2r#(XVSp7*}*4I|U(&t11Qh@BylN<`P zqc1*bwZn?_K-c^1NciAX^24Vd@uZJ4G^GVtx>{2%!W{7EEVe?_{`f7pPJIg{K6*8N zWVE~S(pw|n=W9Ii<1HIWfWLC!8t}7avREbtmtN7zE{rnHy@osisNn(y|0bPp&7fUzQ z<`;c)NC{Oqa%sM^Ithea^wbf1bAr2utG1H!x~8YB%Sn%m5tNA1oRd;1fD*1Z(H3ex z1gEQV1dgraZQeX~=o z>nW}JdTGd(x`deTTVwYRi>{kJwA=e{<68$qKCDN<5xK3S+*AQJH1~dwfzYlM(8?mt z=7X>qTe+=`ARQJCcW0;skkFnc;f;7SJ;3vdIXn#fi(d~~9mntKDW4cMLJm?pE;$b5 zs>IpbuB4ytLk#}JU0p08cgrn34(EZ?SSwv(ZGDA4A+ricd3B*9TVlY3Bvc=kVnpse z-w+aEWZ*qtAKYNXsyQGuQHY3;vDh=(%)jI)nrEHE{Scb^c3K&chRW^?vWJ8Uopq_V z!{wAGYBok6Bc{aP`>p6-Ya0Vl#80rk>M}G=-FYULo94YD^Cd}!6kY6c{k2DT4@u^l zy+>$1$d>*!#bmR50rtDuUp-^7gc4Kd8l}hQE}herc)XA&i9BH+f94i?A^+Iohh=a) z3v&1sv44{cawv&K(?(Rey6wUmiW(Ex1Lt8|6EURLo}y(Emz%xVei8DYC6&vr4ewu- z2-_cUTwjzUhga(7IxfN49#fj&zBgoeF!E|Sah>?nB!V5dND6ulI&(70nqQD~3 zet!p^oPoI?uv*#xk-T=cLC#(}yssXCxb(Ad zXXgrhRzStxTo2;&mqxnIIdtOsdJRFCl(n`2vo!%D!V2;K00?#)^VrjyLSZJ21b2!){Q->(RmU6jp+x4PCk*{RCbSPvP87=> z#OXTYy(5axTqP;w-Ttu?i7SHnzGPb{uKsTQYlzSbl(0r1eqAe#wgo=Q`*FM z>laX`!N|*}R+xNPWJcpht)5O=sg{FkNbkPi57$T5}b=E1wnHqF#2i%raw;S+a=xemeW-H!Fs z+0;0~8_dW31{TEr+UL<*$+fOTrjoW#bZjN8l_YI-wj3R4iYzSpO`fONnk|j*;npA0 zl%_Qd{A;ip4u8vD1USl)969@V`qs61fqJ8A#(+IH^9fA@<;g&*8M1)E+*XW z)Blh;d|;XtZ%1Ops-yWxkV zHO~TbR#upf73w3M$@VzKh5bVZ*aPUGvAJp^erqQk1TAfIfOW(g=$eHE;$_<%$O-Hp z9#ta56Jn7-Q3QM%3ULIx7kl11WYW7x1@t_SiXU zK_ne7s}fwIJW@aPH))B7ISyLr(O2_Zh;*NnbZ~G4zf*{xE`5Q*uI5|nyRHqH2UN2U zz#8dI0$xgLOz+l5k?f$dC%-KsBHHYl95`X0roloBOBe-ccXyX2g(u>{YIhsmJL6{$ zyQLG=B&pie(!Rd%Al|N?M~3p@A3{7)fN>!F>*~q6KbJmz1@Wqti;IIpMc(Kb8yySk zhZ?hezxTra%jw+6_jS1`O5UsVYzu(rQ~;?jFNRO&?kt7AeGbBar|t95uKMZUFLxZV zuH>=TpGqoP!^y9jtsiY)z!)!^VdQhwz_d<;oA!<#jdiV@Voj$btmo8}o9Wcuq{5c% zSKxN%ope>|WfNR~MlC05E5h{9u00CWCwm~pWZeI&BuC{ZiyH7#<6oHkt(|3+E)KLX z>GpetwdsiaP!M3dzivb9FFPo5$Mx=jA4leLdH`M~rx;X;pNfE-5OpZDLmg#!Da9-w z(02IV06Gb9zv; zauQiiDc?&hrPAu!s5+13 zdHm`cs+sIFKUJG|+#)V@ZuL1wg^mtgc@7+laFc?@K1(n4gXBFy^-xkgBr30SRS`xh8^VOD=t9Q5kYzSKMR&Uy@U 
zeI0r`l8zUIJ#0M*k$V%?4FeL&84HC?92Sb||iiR!ND zyq?_U@*-}&b|>m&Z?G!!mr<-ix3y9K7guK$6?NQqdpd`%p_Oii?vheKQo6g5p+mZ) z4Z1@b=?>{Z>F)0C&hvlXbdlapj|`(I4~xw2%f)r1n*+AN84IF_$v5m> ziTP#F^)ZEv>|b>gL_YsWq!5iGn>o^Wn3g; zJ6A=Ah}^Af*BBK)n8-99>PNxFHboc`a?RBcLBvqoej_O#b7~!{)~>`=7>~?=lfYK@ zh+;=Ol~2$D%`}U@K>*)kT7^|(pm=EX1UfD3?UW!_w ze|w^E=DvrUPT`DO1JOy~l-dtsas66F-A( zp|h);UGykn-L?orTag~A)i%E@nfOblNY`vE%tIrH+;FsWPUn?puh7Eb}MDJ8K380xS{qf_e@HH2VRpU;v z>kU3STTUV8#Nl8o$Dro*T%9={3R+6awcJ9CXD!E^hk25*yI=Hd5~paaxg0N`g@8wj zxNJ|U_l&e&rRkkA`qVpG{LWWbA?+#xI^Gf86CxmDqg;p!PI$#nYBvh8+|4y?F$Gr= z&N*MzmX@v`_QZ>V@A?Pu26k@tY^*r#@ICoG<$MWz>$s|j=U3*)GKY?G9QJ__efXc7 zvx^4#$_fwg^eAcmLn{zTBQ50jQ01x*!;dX++(nUQr|2D zgN`%iiG^Q=01nPbVUtaABGuAu+3vl2#O#|*?pP_uf#$^bg%*1mGUm1TI)R&*pqPEp z;eX8K#rXnn5-%}^{VWl;eGW$7cqQtf-fli4HRKQ{O?SeCQl(*#92=9-RTk^e?J2I6gMiZ`8#zp0hE*#)hSfPBbZ=$s5 zAp1q z^xd0Ei%hXI#U5O1C#Y7}O`~5ZT*0(W26ITxl%KW03}l%a5M_w%x$SP4EGOm|9?f}V zl{M5vW~m@jGyX8lHZ@gnUzZs&(G4QpF3f}ItCJ|&=lB*S{Vr?XzOI(_JdUNA(j?5^ z*9r@?5>VhCQ#B0k_q02!D#2)E#T--B43x(bINLD70&q=aqkv103N=64o`J!)@L&g& z$$w11XoOK3^PBVQALWfzDT%$Z&462@47Z6N-ZN!1981!fGCc)~CnY9PC*H1pb_<4s z{WFLcu{0vWPrZ4#BbPr0BhPmujhhV;@l*aAkE^X-tR-#P~(jfR1b13w^l7ft#9Sb)%cNi9+iK%Lu8h`}R{NEv~Z|NZkF>K#Kahv!*@CH*; zf}4Kv(v3?H|E$&F%ZM+3LV))bs=b)g6V?uFX#3(+tET#mb?R?(-KeAxqe^i9e7SgG zGhM6Q2k(re#Ml@IJ!!2ZxcPs~%Mbq*iOUbR@L z4h4*Ti@Nxs^g#MerSjFD*MqOU;;?gO{Kt=E_^MxQ{>uGXm^Q3k8@us#1%3y#N5!OM zuCEcp<%O%kx~X}0P_Zltr<}w46eJ&6){x;2RM-YM_H9AiYb)3Gm^K2;P1z$Wm$S!M zZ8dfDAIz(jA-u!kc6b-eq#^!iG@dWSRCu)WZr!mZJm|a?3Vu`j?&1C?>=g{zF)7;z z^6lnNcjsTfe#QMZzO-ZzxW397|H?+HaIm-6xw2w3XHolnp1`b8_2u5>Xs%jX=S?k! 
zD&Fh;TH0UGQSIJt7cd+f)^~R_UmF%C6ymQH-)J+w5i*)DD)Dk>p8Lte#pPW*l^Ddd z8>!r=UGGT^?zKzF(eXFJU*?SfvAbQG4&rTzG99VW%0+QtW^xh;xTzbj2LgKOzkH!5 zV1m~E{5XB}EG4lI^flmHO>=3J5F7EM#038~b!m)c;9Y9DCHaAMLOI(@kUxFmEUo`* ztLwr`tnvV`>1X!x@?2@km7T~Ml`d^wq5DLz(&o+nhHLkb`VXveH6XUA(kOTTT;tPf zGp7mkofE&Zk$42=c=GpeY2g~Xqo%Vb!#W+|xo2=gWJKYXlM$xN-5=&^T(D<2GULJ~ z!%a_mUs5EXxJgRXvFv3e=BGDU2!y9|gkhQKGbV`Fh7fN!s~EH{zyYdu`uC|kLj;A@ zDjsl;da8n5rrWr5L>=My*(qW<1vopK+gZOgc+!)h-gd?-WFkTQmo^9}1T?zx&{T#< zrztOCvR@baS*<`_G3!Ao{Bx*39|Xd6IM$ew|@8V#5yU;GfST0wYH^8azjRR1#ec4rm-lZcZg76&^W&AnZ zY$OHFL6?_e_g0?fyw{2@Yax7(ARX7$g=a(gw6g^9yH2Y!8UxOGXB9;8>e`yzMl9yU zHZp7zL3Vg!`V!TAeJcw*zep7V#*WZ8RrO>Ao!Ul!7ju9>-cvt6*F)%JaA*JG2~$YNg-#s{@^J2$$PdB-GVNZX%9@>)Hu|KTq@ zX?W>1N`BQRF|}MCl$V}jipvr5d9;1gS&c{$+Yy-mZ}$vpLr?e*6yuk<*fm^1zjVBi zo8HLZ{d|SHRT^WWAi-!Um|7o}uS@PV&)l_Wl&Bv2K>DUvAG6}-c+F&z_-5wV)P9_d z7?$0mx=R4I4ZzlT9yR^-h(47lA=lqPEnVMZJ*Huge$Bx?_UD?>?TEmJ$#r$qikD8HG6-K);LQtx+*ieasRXfHwx4g z)XAaJ0-3UgpwebBH#P@TCwtjTJ({sa1ORk|{7R}}rbyx|5DCN{R=MZRc_gZf0K9*_3g8m$uu$A%4A`H01u!sl6+(|dxP4gcuwh)#P z$MsBcEpv%>Bm0FY;jWj(<}P1BJQH|swH3dQ+>DlAwbx-YBY=7KV)57bH!pnD zBvLf{BSB%K6*BmliiIy9NM_IlStXD6x8IA`f|#w7X=8tv2ys?OJy?=$V#E%6OghN3 z=Ws}yFIc}>E)TC(y`IohZ%*Pe{Q2&((B*i8mArRht|o03xD;J)+J|oQ#*!%BcKu75 z_|ctW|90@FDVM&A27rjhh&6+><2ZrGcPe|mrPfqQvcMZ!E6m@%KapZNKTCDrv(I^Y zp@t6xoxekYJvyHc@$1c6=?%*Hi1Ls7@YC48;`3fzdEQSsH4R-9U{rky*{aVeT=g&snz>RY)TE{txKK)N(_|QN zFo3~3wc6b-^6{N1n~4MR>*mkDr+=- zBA2vnRs_+o&^yy1)zY?<-=(_f8Ek>o#nuMBEE|Nj9dy#2X296ytbQ;5FfVq9bxVAi zw=F=sK9CY$K739h$~*A+uERP@v*VbR?O3g=IpiKkimPqJnIKtxw4^D_F+yaCZnC7K z5r_KGy_jk9+A8Ny4O;xELT6KMDVnF9;@O6j*dPC?z|#)@sLw{C-94?1#R1|KJGIa5 zZGn}uds*Aw`MeQ&B@Aq=u|Vq0x<9umFz7SO@3%p5eUm9-=^h>F61Cmqd};Cyqj zP;W0gyqGH;LBq?N@B+WXWV3H+X(|JhiN4*$CkNbH77w zYC58|VSt%VwC=#O0@MIBHH}H6xx1asTW9;72_@|AW(ROQ0DaYw5@>6)Uug)>J4i3G z4WDXD?(H?5+DDQeShdP=1GaLCdn*37#<33RsK48Zwcs0u=wQdJvP4?4OPhIfvjLuf zDyatd8z=0T^d2)OQsRyuU1l%Jh7X0I9lE<)V#*6i@a8X>K~h{rQ@5kRzF**oUvE%= 
zIgya694pnQhwIt9omOqHn*;h2aqpmo)_66o*}#N6nv5nFD*YP=SVd+!bfY(Uu38LZ za3{X+LRT7aYmA3TDdP%&7}w;*{9HYheuh0WmhY0$e6wc$s~$Ib8Syaeg#8fuu%(tE z;)-Dm^auSu)Hc~}t4@qX7;$Cso$7HMJ@hgWW*;hrZ{|Re5pGjF89b(n>@8 zY~UEKJ$q7&W>UDvh%``}O+Vqd#-Q=Q)=b1gJzXW&{#tc<4T*w|wqwzskGOn8Nf>TC zyT?>DVxpZpm&Utvc)199!hK9foQ>Ji38TUYTFnleIfuYI5tan4=BEeDU&WgdnUmva z&L{9>vX?_xi##_sv|FnJ7R}hw=TU3rkjLi?xfr^U5v7&5Y5+?I8oMpUks>%pWd0~L z29>_LnUx!b6T3p^hvlfcUSV=ea&l^3-Dt@g2k?Oo3xN{Zk6d}J*eEETU2C{REu4Pv zy{IUMIXKt*)W5={4Ba8W()s1W562B!_DIU3oTxI_Oqa=Sp%&g&lMAZ;)Huo{(5F#e=dQiXbzE_BJ4b5Ml_A9lH55aTV8bATBkJ z64gO`y?M)9mTnwZF+<2?KLIQ1bzk?Nc3dkf_;xKvplA4g3lr==4p01BSUSCdPr?>H zdPc!nTOg!0l_w*kZ5@vgqq54V3iAltJRa9Z1X;5GA9XkfJj4Sl^m{F54278K@c}xj zrwTB9(LFwhdXps%adjc7(wXnZ%N(X0|2L!-xPcAx*khU?KAwwP=)_lm@sBmsh5Tto zcDr&nwfKcaf+~&Vot~TCL67i-ZGGDuRbGG*yeKF+ID|3)OH!?`3TgPwQ+J$^YwL*m zk#(W5IwUBQedvhI)X5S_gU63d1;+=)M|HM<7ZM)z2R-3BCdbJJ$&l7BCDpcTS&abE zN5||r>h1gJ<;XIZM_VmI4Ppx_X{!m--i*aaT3;=-4(4uClPkE-AW-eE*$gVV2y&B` z_lyq43*<$&O0om@e1D@>VqykCa?Q7;o{T49*s z1r@^|k$-E1;?TSUedwG1oA)QfHkpRroeOa_TJ62n;z4B+%?T^nZrv|#tF?3(A29SD z?eo`OR?Hl}!j_Ubc~${qa}boM`+W$*Bz&;k$tYeX>vf2wcdTi=pG2)yM$-6SxKHeR zwL##?^*M-3Uspb=r5_(+B=x)pUL`>^PfK^K(%64d=QwL4$My9PyP;ZmAqIOIdm&x& zpZ0}Kf+9&XG&)%Izkl&&b-`xqJI)fnE3u6B#MtaWfFa<&vMG|Xy|iy%`BtUW+&AyQ z-HoaPUvRtBJ!aU#_x!JvT~THqG}-JpoNHQAP9Eh=rPzaj-szWz$5|Th$8CYtjqTah ziHP;vcZN^w+xbN*W2(ln3+c0J?)f^B6F@ziYiM^;Vq^IPJLgEgYhQ6?8SJ-yeQ}#N z;`J0i-_)juA(0(RMedDU30#%IW>fgWLVBH{M%+M+!4lhW_wV%b0ZUfv2Dg#^*s^kq zk>2ZIw6d0_;2pq0YNIzhKmR*pMmFiL&#^@8t7FROi1*X`OueOa0 zY&tk&nHx>=>6w2JN8EPP)8`%Ln^GSvCKfs$U#`m2nk-J`RU`zYr=$6qtj=aD7blMX?IdsJM!-LHE6! 
zf9Ltf(O9lHK8AC*QlO!IT|o6IFWgNxsqO|KfPzwgc4 ztm*dg_c@y&30IAuXWOZQAU$t|C&@cpT&FDm+W2n!0i(MS!K)jcx!}z?=pMpEWpDl) zeq>qn{lCiZrQRuJ1ol=#s}rCIghju8y1!6u&&st|}J{?H@BKvDlW%)!;Ui6L&vgJNU@ zv`}}HBvtGR6&&`7@^Y}oP^+`ToBs*$r`VFz#f~ZA1P;TeyDY?@3(g;AumL$P=5sC51mT#+iMbQZF|WCehDJGVOb+P7$jR*@?$0I-+OG zAyEQ{79;aiS#Fy-7*-?6H++Vbj z(}8eMXrTC*Ucdp7ccQp$;7m6B<6NiRhT`V4ZjhKhZM7Q=a%9AX_0RZN&S(kx>nOhW z8u&VFkS15_xszR-Y1(oMN)@Bv%5d3`lwivgM#F^NcK5zy0i;p9o;K*%O49agb$dBi zO9Zl9cslS-qMah`%HR1Lc#bEdWE8iK*osbmfbMq;sfZT~cW>-1rUC$)S@g+pFVlfH zoDQ`dHar2< zffO(jD<4Gu)$M`AYo@MyfTyNG(k-cK_Mt(BSh9-}syJePym%WG!4PUk_*h>r`@gW@N7o$}u??3$;qEP=zr;D>ZY*T&>?_UV<(@c>CvuM!V0}5#>soHX z5ZyOsvD~;a8DF=7OXj;$FQSEg_67yFT3Za0KEs4}XyD5C80uXq#RE%oWy3>D=D6y1sEzGd^F!kYM>K%@Aon&{1*mKQIvCT0F!0Dv# zJ9Ugv$%GeFr^iETD&8hsgODLJT>PQU1stM=Zd~Qrv^Nf^p&1E?O=*6hKB0af68upl z>Ta+VdbCF7@HHdO8@S{iy$JxJ)yu7#NXP7%hBnhWRJ)sMXRHWO{j=73%j$z!W7hcg z`9jXZVC7AY16f%RaG&Byo31|$YY`L)cTAlf=VQeUaN#oR&GDv(^s5T_8nA!_>j?o$VLy$q;`>mk6}%Pw`;4IhOZYf# zjvJ8^IJnoV4>D4pCU3}oO?Wwc%#^b?kG5_2ZKcW0nvM+F;mgotp6cdeOrqH#9Dd7I zy4q#dkh(+27pE&bgkgaKb5XR@i+780tROWR47~PcMuvzt;`w4b^^Xw^+krnX#MS}< z{p%|r7sd4{|Z$hHTwE&7HJIw9k9CUuR}?br$HO5%uxZ!uV{2u;HSsu)sPK| zh`!gCZ$M!z(@WToLmt=RjoBKXJ_N!5VEeJ-k%|+HA8<+e=jFy%)b|`;C0ne&tg<8b z`E+kdpvkoo&wbh;;4ZCL4Ve6{2~V;^*G?c(Cq^gVO&f4ovUTYE-J-YD2Bmc8)3hnB z>^WXgP-_I{kbVzv^Uf?0HcYt-D30mqb4WT2UmGly6u8#U+O$eeQSK~sZG$}LUz(W+ z1QcE9zOmKz{!!~bFbePdhX6JnRFSbYY72yw3n$sJ0X25W+_RU7cEGqS2m26VP7|Gk zVR0zwgVxxyv%axrxV+~fv?G`7qx-hknCb^_0+rLLjs5n;;%2BOQ>5;>KFnj@Z8%oF zp|X%`@7%QB#TH)QfMlKDaCBFP)X74U)#5SD((jHB{dds3{^Ax2B3=*X zWgb>Q;xvhKEo;2}ovt}UpW0;fLuyw8D3`NfwL^iG^K+OJxu^*%V5kGzTfhjZ(}ZGs2pbQ%h+Y$#$wL&0?P zy)?oM#Fvek(z5}bQ44-gT6-3i#*Q`?#2E`>;QHq1=zKH!y185U>tmH z<52wWqdVz9B}s0=inPP%NPUNHv$4L~`^wFW#!Vg@BCQc_Fblo3270G>zF$Mu4q=rJ zN?bPgm?hJo#}v1s9eS_ASgJmJ6mV$pnHSQc=jlxBWRIthfL%1-H&K8UnOAqU2IMy$ ztiIieR~nNUFYI2P2G&+JC&e;K);AgJ8aq+XC`LJ*^x&q=ye&x+vK5DBJWi198@>GG zi+L64s-TFcmXLFI=X2S+_PW2Yq!4y{&!oLOn$0=Y3uNX=a$s|F;PCM9cf$`c{<#m< 
zA*Y%4Kb+=kkju1wcb_b`LU)EpEiA@HJ1$(7Is%MMMIJw+`DZk1am}*rNMM@)&%xAM zNc6RnK7hSxlyH#xek9y`@Df~C>tfeR}gW>7sM~O(O1hY zSCk(vw?5Ack=_mbyxrb?{l4aZinzJz|A;Q}`lP2$((yvJ3Ygpe=huo$0T+bsw^Zv7 z$0C2*wBjx!{|R~>DLUvh;@c;W$W8%E_rk7dR`DlMsXf6Ea~pvn@Cre6VT|Ndp6*#U0}v zo_A-Qo?R;yEDd3Q^ZQi8L-Z}%kd1k9E)+n-FZ4Y;oL!oc7fyTJ75B*$e^H> zJ<^@1Z(Nt_5{GXPb}9PCywf3;PbNx7r!;G4cm5;biDd-ocf(weIkcYcAuj%I_(|I# zU0MW4UBebWIp&3=f=DT2#S3HPK3 zg}NX6tjad*P4dXzPHr4r(FmHyaf~x3eOh{4bDgyI>Qvmk>z7cS+~wAhaXG!VuyqG%r^^ZPZlnA`Kf z%0gn0eqGQEBYW;@jTqYki&TfE{GQvV;QDH014F;{H6oBk{F^;rl&-&(s@t3Jf(V=|Ahc$;|8>L)+s4`~eg@WH zV{XB20<|89L(#G+vW~k!Ay(z95BeU9=s-bfV(N%uDE86DwzN}H8_P;g&$&tn zRMg9l;q0a%s5Kc8^qYNLcNF*7J=RuN_)UY;4YFGdO8lL-3xx zl)gM>>B55Q%y>lUpI?A*;>z>JT03?Mtd?$fK3aRXWlbfkc7sl6K`HOE@zy}F9=WxF zt#d=Dr$hz!A!7dRlD5iRQv9KPczx{?)aB5bE?bWd&e^D&V~g&HcxJF9d5bTqV5o@( zeshSG{5w&*8r+%^N^_C+o0DF%-VB}KxydqIUj}^W?hMWP5}E0F#upBJ(&SkTS?xu}OWr07W-@ee{2^f}H0+wSiLRW`E^WSFJ&UzveQAQgZ^sw$sEXXJ7i-$@GP|{&uVgHi*EPv8f zU~{znIPyc!_>meOcB-$^QN;9OIH1|*a<)i;@rpZ)I-8rN)PS|5{AeIUQWD%d-QiuwrT#P9Ii6odmj{ikP#^^ z51Z9opMq(4+cd9}r7cIP&C#M}ffth<*6bRdwK)-$fY!!g|Hr$BeLvorW!xN{VxLOx zzuRPe>c?hA#(tJS^xGlbP}?xL^a26??0_YDZvmKPuxG(QHUT!9G`IUUpccNXE@|0^ zokqjfU{+>d@9fzj_h?d#c9VDq&*lGjruX&p;jodKWU~plog_g>^A0h{&nh9K!^b*oRa$P03U@-;(qpEW8S!1 z&_wWMRQ_}d`g(2ric0ObfM8s=52vZ={@~0PSf7e)K#{q=y&sv=DXCWGI+oJ~7I6|S z9L)%mRZOCB8P;=Hu10*sq3k}d{L`I^xr>*b5}1deHJY6Bi-|flH5Fl@aC?h}KKgfI zEs&RIJrE=X$Z)8-6*m3tKcE=tK4mXGE3Aq4AU!FPP8i0r?FQ6BfxceSsb-{yWn{2@ zaL*Xz`&nYF#jh?qY>{R>!%$%ThM679c7OlQn$doabEkeK+-=gGCp z*=J`cb#E*;L>0nQ$IHhTmYkgIH42c^>N?i7?{3z*LYiE*qXOqZsic7{cE>M=U6mpY zPusiP;^I`O*c6f$7IZ*UMvUw3$UmQF*Dmvw7I($sX>U-)^2*fIufUh6yU(Z{4La8& zGlx(8*g&96wm*(?6|o=q12Epvt>w2nlDSboEMfkC^JD){AU)j%1Lw4)7h87JHh`Qb zhNP%t|41*3N?%lo-mO` zR&nTbo(4H9_dTai&(iDxkt8Q+4jv*mK ziZvrU(({8<8scjKDa5+y+X8T8E>Q$~;-SupkM>aKU5^YAD8h$x^!M+TVfTv3&Szs% z4)bmB^1%Po`%99lfKv|{j+B@%f(MiJxhx&Q3IVHlw7wYAw<$s_gc@>4rd?Zq=9;0T zd0Hc#6Sg7FX8kM{^jY#wKA;Wk&t215FL!zsKxrQ_iC8(Ly%iC5*56 
zvnz05duTvHP5!2)FVI99V1Dea6@dd`Fuev1BzeKzHDY6F{r6XKUvNLR6RfcKUZ>rR zjK3l?+A*Dk)}_xvDSHfYQVyVxxJ*+-dvVMGXvqRc1O!@7tJFily*;0l`%L}u>P%g> zxRtx5#Xa}aI{_R%x64mOpQM@^S!m|$ge8$fLNvK}Wqzt-K0j;Xu1}-;eMty2bYF#Y zCsRRgx5;)ak-z25+Z{rv4PP|*^cgX95-QQ(aOQ3ktEysobvB--6|2`Wl0CCFj~s;U zh`ZGbln%8tdj?kekpp@C(BJbU5WYKvhqUaTfns_DgT(Pk6~#F?9 zXI@)$?ZJkknG^vZP<4&DVFp*&zfoF3G+oC-v-b=DQ2Z{~IPwg3T$P7Mcw#H+&{A8@ z?ivb~y>+p!FkmRc=7@jCmXFVjVPrD3f>p~t2gRWf@0nR38)Ay2EY5XxFCb{mSXI^V zhLl9Uy}EYG&CNxp^z+kM(Cwb@!>FjRI=g~I6g5&(EFI-qJH>JM$l0jO|C<9T>?Eo3 z7YW7Ke(^JRo0yDf3E}_&3n|5wvgUTu((`IO<=wF9Xb1WG7*{colp5pWjyk)dlF&`L{u|9u)^6tnyZG4=>0F z864%EHasl)PbIaMJbuZ3#_tq7-XQyBs1tIfsLl>_BF}TBGYQZ}H)xDj;?>8Q&)uX3 zRR`9BtY#JXLc{F53d)_Jbg>`2Q!h4Haz(9DhN_aJeyhGrxHFT`S#lw+Z2^b1_8O_W zb6(}4eao*BDzipAEdwB@f~`vgaCQ-tX&xh;y+n7L!jnP0?%ju>$2LDB1X!M2O;yd# zbG?~=_^|rK)4TOHJlkX)-H+bhwR6g?L(QbnU4FCMmtkVU(sQt5YhG=(Z+DZJw zFW8Bs-Vpy5rCxa9%75N#l!s?KhaGURfMS-y(Ws6xU8Tq2_0tHG^V8c^#k6DzADc^+ zkWqor0KPk_f&^VPO!w+fwf7O(I=Vwucc{56{udYYS)u_!jGRUr=d~nW3csteoJ!@T zd)Rr^d{0*>m)d7T_3Md>**M=#yN5T)(y$^4a>!Wfa+NwIY-qV7AK0@(i*l*rsl|Gq zQE#1FD^MEkSfH&97<>W>Op4=PW%!<-XV?7AGgZ&T|M+6Ac6^CCH5MZXyz$WFvTOD~ zk2U+0u$B8`r-YdkHf(?2q?^V{2urlQcjJiQj{b2-UX&99TpA&}wUqT1vg}7Cm>Qi5 z7AN{2%mJ!|c&3@&AUdVXj!OTwaAF~9Ow|v|^Uw*6+*a##rM+)pgGJArGHdB~SFNwR z*Yy>Zk$Ah}VI{E#&|y_sZOOlAF$n0A69%%7L+V=mmWmpr`UK0Vq6fLR$HoPZrsVbw zy(7QGo3Ffx$2NU51I-&?#HDMZTGtAYly#2EWXF<9qVWezFmdgLPB6@Ya~P6x8vFaJ zuAxD%f+>8)rfmk&_vX~rp@;`y-SxHa`R@n5%0MtUh%rA_z*Q4&cas78SY~tWstTMI+yXHrWPZ*h+xBe$-DP%T7VH zLWq_A!Bzqt9b-Q&2N(A4#y)ipv-j7LA!#3fs)5f&Jw1<&7CCnnZ1nEcaIl2agudIe zt8+tONrN1#u}saomT-2TPp^Bx(`QAu%fDTaa(cTN=w)Dl*AUY2qhk)D-!5k(UBbv= z&D_+3Imi!x(@{UVxW?!8A6VRI@l-H^Pv73Wyq@BIvvawW+?YN1;GAi$i~=)YO`Ux! 
z1^B5>{2Z0L?7iE39gD=f0XQ+IgYk;Tt<@_)@$>^2EaAH(W8c9ZJLs&_Y5OOM8nwcj zZJ6_c2kEr~7yR0~37o$6W-C`wSIdTf5{2pM>2)V1Io8n9&_Mj3yx4TA;Pe}vh#Cf= zP-q^C(4qtDQNnAz3Eb;u{8jp62N)DCA4^7`uuDu#jE07W>s|vW7FM_w`Um>voE%A} zexf%jc&~P6hcT;%A|6M$5)y!lT7C5HYB*3(P(Z`P>`CLZmwEP(jdlNTB${c%80Tds zIQPVrbj?qh8MO!Cp8j(rv>Z!$TMgH9kq}}wU|o~cZ;JbjO+0A*==Z`Z74j$9#hrVW z;{{!!;~raro9$&CaQ!=8U+lkpyx*EuDU?EyN)l^BOSW zR3h7p`xjy$Yq;v~Z?|_mjVvv#1O&x+RO59Zfbf${VTfD(thXn%LtmC%&D?v&fr`!hMEsQ;R#l`lg6Da1|qi3n)akM z>$E#y%=}J?EVzW3F)Sa>1^od}&mx*X`kGpNwg#cs<3?@2!25hXo0e4h>@HI^AlQl9 z{-u6=S@9!NRFB`(_@FWB`czSR5qEd{bZB=|Kn?~95{s#Gm2kMnw4AZA=9TkdpwWuL z-eG0X${#y4ck8jmFqlTqH;AFq!RILEkRdKn0}_CZ#yy%K?iz8#J1K}U@+aF{{~eVj zUb=Uj9}Ky@L36QVPv|og6M6Huo)W=8xNESQ)?v6KuzH0N!eaw(UDj~ssfIr@zv6Lh z`gsY%DTxwqs}S25m0mFIi`1hd?mYT zg0Y91&2=|n)$!^{%HC@{wBMS8_X0o~`SGTOaS|p!z4}^1w8WZ)V@7GB#8eesw-ysL&^Ej19Cf~Z9 z3A(mCj@Mm8H(wK@!_zy{BT^9i?Hf1fr97j6gd6lcB)nGQiYYep*<^bi$5+KzM_90D zZUTjY{|4ge5rH?$*5Dr0l1RW?o{?*{{xy!F!N%Sp#3O+ciLD{@2Qa3g%2G>IpUSRu z@1uyhM}E-VI|VnRl~e^O@)I*JE3%gPr%&QoVk}Rq;l|4=t8XY>HDFsC<48%!x_|85CAO&T?V}lNIF-Ciu;Aodc2{IQQknhaQ02p?T=V zhyT#*4?)~)nk-{#emQla%hnI-b*=NuTEJxe_QjN5om9^F7guq>hHeEr8~ozd<>7ZX zf6D88zckv&otBZ>#RCzyBb*Cjasp@~Jk(u{hfg2(^lPNA)MDWJ?qsGd8{M!IPaV z$r(~E(g4}gkH{zdGn*@!C{vo%1>B2(^Jz0wA5k`-Sj^t@`yd^NBFClADyka0YFq_8 z?cq_0HpMTLVtiU<%Rj6H`g{HIk+67Qo?-f4ZnykdOP_nA9bZkWbGY$ zCnI9L+~V!K)U!+NubQ^<%9%6*jZt6%-p53;yesa!D=o|kgQ6;qn#>9%tFh`RD-ODu zZ%cmK`aLzRuniV%hD{NFT^J~5vXUIaunBK0$G2)4v@_Ep;eqV5`6r%S+mT)95M@xt zZ#)s28V`3iLj(9d^QtL`awvW&;yDww_vsd19T9V z5lYPFFLP^7BiLTO@mrG^;k$qrn9hXUvT|rF-fnPMUTR~ighsw_AL(j2YJIxY~q=3l#x4(YgTXPx}!Ef z35G{JZDw$yID58`%{$J?`1%>q93{cP8i=3O*47qhE;cwW;{0KhNC1WFSQb}SayleR zzk3I9UXxVH6eju)vi$AGbj1@YG3W=;d3Nkz09mijmNfZT@ed6h=B=I0OiN1zz!q7btv`7U4mg}9C$$+u?ZsqLP;w_+wlL?ZMhDw4!?GIyK>Je+=dHW|Mf^>hL_!1u?j@X@ED-r+5+@y@mDR2M9zxIa^a z%y*T|Z(rKyT0i65XgDq7Mrs1H7+rH&Uo*rBMP}GwFrw({fJ3WZx&AeUFEq+oLh;eb z`A}vC?4r@k{t_N*=);guMu!)vd^|;GOW$O*%K=1YTrfRp6c*>MSY<9IU2>ix%7#Si 
z(`vFG^H#7<3}1wH*JQU;_s(TjI76_Cv>)5CtAbwR+nQaN^>1;K&O6&OPUj(Ya*cQ8 z5WlaSJMq|k-REKuYYQ~kwFgkZ&z~|itIQOGifn6Y5=e}K3^w`J!3?``?yLK^39(rr zMj93h4xEp`<98BA!8r>5j>iF}a0yAP1_tF6vu%5?;>F|Bd>el%Y_zGjT4h?b(la46o3KB z&+fo}xVO!VuM}>n9hlW7_d@`UFB;>tKe>%Ds^UBYUuY>@4=9Iut;sCi#m1{}SRkzF zzA9BpQ+Y8a9V|R?1vJmYK&q_F@$~*n)$mF~k-Pauk})q6ZAu^?AC8{T({h;Cn2q1P zwD@+ap0Zo4HK$0x@O1op#c8OKQD$+$rV%CX&NSW&$M5yyF_b?XEN3oSVZU|lQw{?_ zB9o(Q7V~6I4V3G<Y#sY<^c?Q$o{I=XQWKF zM|~0##zRx(I1Cu`yx5Ndv3jO!AbBD82>cQ-XqD?^$U#+V2OdXh`f$R{ce%A?5}ynabEIvR z9+RB>oo_IM%PJ+F)1(zHYqLQoiQ)K-mvrlo`E( zrLweQaq7ZY{S9xN*wAmboX@{dfsUD!^z@$4nUZ{a1fq}9Zh1#J1vr}0eh2pO+Bo`6 zEqdMa|1u^pv>J%VAP`A=KKDA+X-RTNvARh<8P=8^SH6#RTABsWDO*vjy+eDcQAyNN zJw4TlnXvHONVpN=q;Ry0+Rh1YY3GMM6pH0q{6lReSR1n+msJiUN@%K?YB4bQ3+X>d z^%wl(PWWSlP1a+{YK!6Mmj^kwnE*{DcSxM&xWCm9LoU@w;g)cZL2EL5$t2J;MUc75 zc7R%Bpc5O)!;0(a!VK)Ger4?^QFOj}63*R=wbb9890bY`k*{NAxGibu^%WSa74EwA z;B9sLSmAyL)+Lb4V(JWj6Z{SYxRf!!vT9Hans6;@xwe~YA!4Qsm73Z0xlb9#3@TU~ z5lEYj$M2BbOb-LervJm$S++&_N9~$M8YG7nM7q00K#&gU8W_5}yBn31?(Xg`>1OEe zPAOsE|NUYg&$D027jW+NTkE>c^Y@tRY?`?+$$&0;J1*;`fV+=(70J=+C)gNONl^iX z9SJ}DZ=zPbhUT3AEp0HzS^crozSFag2?s8t=lEzl$1O}WWA;vUV0+gtlz&d1A>FA8_}Hm#3KEb zzRZSx?|9n^k)8^~w84~kOkQ`#$ruQ7t}WmZ*Ap>NONuI7K55o42r!HDI8&=%C@ZdF z5`(zVpYsA@IqjAB=ey<&%@D7cL^z_w#iYD#ZE_i{3b{Y()9BGB3{giSqxNBgCCy23 zj_Q&dbh#}{Dgv^noM2P1uJ#X9fGWH&mu_vS_tmOh;UCf08z{On;_CQTYhO8)DR^v3 zP^;T0GnqLb&2d{PAzAuB1rdr1T;c}GVJ3`~()~$oO)QODj#HZIIEmkRi22>l0$h!z#Bn=rI69|0#)cg@F2tL(ogM;vBT-h#xwqSS>r_{RZ`lJ=8ysSyCR z{G1NJ6?gNAU=!jGg(yUwkEuLag|Ec;UZjq=O|a5KrFwn4jiC1#QJWt)*{zPP$1=I_ zAzx$PPtj53Vzq{Z_KdC|E3mLs*Vl%227E|JP@5{W(J>5Qd^38CyS|we9a>_n`64;i z5D6l!!;dLeX8K$`7AruF23lI8V;CbSy|U79ayXtti6$4sgh#^Y37>|r8IF7(OKpM~ zrCtR8+h>M0T4CLjiA2X2DQ=+(f3-L&8BPk=DKD2ZHs)YTGWq)H7bnLLi50?a)#|HZ ze7QAmOn43Nkg1R%9Igu@xC0pw#~UBg*UW*!*`wl4wXo#L^KB~-ldY}Tzg=bO4y+T0 zG+*-XYPx+5+d`g|vqvtyRV{d)Sf+V3jYM(hGhULIk$aE+Vyy5Glyjc25=S8A+nMk_ zeNSK@v3~ryoW+#U;+x0V^LuFGp<~*iuKb*RMx>8EAob)q-02Oh1d1&H|v!~3}BAIzjM(NKD@YKJ&v!{yA!?6Ogx~? 
z@}{DsDbrh9TVppK#x^u73xCtCHcIbFhy4ftuPU=FPPEX#{gA(*$lzV8%Sq^To)iRt z1VOv}x?sF??S9(~;pc>Rp}vB8OI;epvbe319(Lk4x?Ud+VBeJg0J8d!m{&kAZ>+)i zzwn2V4`rKn%)6*NMWU~t{Mi;6k{Batl$iD)OCGQ>EnQ4|jdO|{Fq3p$g)du#p#wr9 zZ-2`k{45&%?!4Z2o@;1b^WLrUP^hX9y#Z0^-q_UnfeoKG=FCpd2(QV^oGbQ47T@TR zP=FP&7(6y}!Uys}dB(gL>g;H9@Xxtg&w#el13%njy(1v%YHCMV1s@OWu=}&YSG^U7 zt8Gcp$IkFWD1vTe?9ij^bb`j^Ub4aV=BqHY-4z6wH9}l&3c&zO4BF~R?`j?1-x%`~ z*q9*gD;$Sy31_f7RI>tYu+VwVFr&3qWoW(ed!DM4NSA~%+I#WzJo?Ng%PRt+tlEWv zXl%wYqTQeZTrbE#{i`Nhr6ADz8q62rNm*Sa$ZI7rv}Q#cBifF*>L8sSQYEAoKVye1 zyQAw@WAwlgQHcG$iefAF)408`jPU)ipTagwf@U$6Sf8W?@dA$wR}yh{!;81B7|Bua z%7F;{mv(wa=nM6+l+VTgufV2Wt6?Xa34_!2vqK`iwC!%lrTg1;ziz1@@$8F(P! zoLf~zOhwlxBuG6bi{AlF(oTXGM`te&6^3afDl#naR zkyqH`!?TKLeGzN%iqOi+j4i_@)N_Ig?AEWbvOeqW;TF^Re6+;ZA@W5s zOH;pewnp~Su4){lJzU+RcFz4?aD&v!ylFg((g76RY@A_olQ4VM$8jCp54cFzxbcVR z;w~~`Rm1kTdjH@zJGx?2rbqNXLdvS$9ub&MH^^pN!yv|=*}O5v!BvH5Zj4e0z!rzM z9*b1mvv0LZFuJ3yXNr@k^*lhySi9z5J(<97Xf|5GuM!M_H0kW-iX6n{$6|JYhh%$DSr_(p((Z~{;a5o+v>TjG(Gp3P@Z_Q z8&BXXS62yn>ys8_D;gHsY>7%Be-U>j?#))7j1d}2V(4vv%apkpxK>7Ge|0IREZ9tG zA&C0RjhfYg5Xd`I37g__7&W~!Vei`nvO7PRWh-^kT}^~lSgYc@+0TmV^tjJxGYvwT zRY!4ryxCdEGvXMBlEYI6uFMvu9F9ogO^`;9=0xut@qV#Xech%B9Z4@g*P{*4n}|9A zutSm#Gm^wvHiZ@<6L$&eNXD(57UzJTW_X>g;aVS?H_-CFTMG}&m^5JSF|COFgYIVs4>SfLtS}Al} z8G1D+%=Z^R$W;4aZ(KTR{cGHc`YxGn?4bTQQ_0bzcdan0=s)r9l*WyH1JZcL@HdeC zm|xnhMs-Ti(2%CHBEO|J-S7oJ!qRK{}bTlkQ69D2n2A^{VNXPab|R5cacZ# zM(8sZ^4{{E3=PhD02OwDLVz~=AgudR^w-+aGItv*C8q#Ll3t5Tis9F=w8KK zAE4h(`R(7OHC^-`yO6N?U8%@r@RRJ&$`4lm%$q9m^9v3Q=@IRwsI;}MZmhupFAYNx zde*);(2IO44hIp-%IwJfNlmpW41=Y{4;MiFd}TB+_H3X5tJ94GNV-x6j6Z^A^A^1}hWr*5F>UE{n2bb{KD(i*LM z*8e>_>^`87J{>cup9;o(C{xH;3Ec71yy3S$cR<(oA(&pGald|;SZN(#&0HI(qjkI7 z)Q2ZN?9ftmA>$oE9iYI%+b_%w&ouC5ombnReZCJZW*>u5F9k6L=<~Cg=$N;t)%!QZ z9n;bHGf|eIC)LK>?mVk)Y7bm5cWrw%mK45TpKQcZ@9#LQ#|+Si4WmaQHro`k_##Ia zbdHNvD-qvl_dj^c39upq#Wh<_!2P1393_(AsKzPoIL+~K|IwuSp@penhr^s{KYQ1q zU$b50C>0oj7=Y&o!U}%fcEWs+Ze5Ds);4k5$zm1Z{h+lMDKRlBAjlvB{z-IjGT93I 
zm4S?eLHsdI4CgIkbMbm-C@!SbYpvBKg~x{Y>Hdr{lNJL51NKbhc6B)3Pr*?X$qSHh z^tz^>Fw_P9=T7CdTmKG?VcfekNZ@$iK_7 zkKNl<5+A`#|Nd^E5r6lVWepoh7y4B6vHj_+DeG$gVDaHR%8BrNl zNf57H4r|R$7@a(ID$6}@rYup+QB#b*e%N&)a$8BjLxzFYJ%iGL!|g=Z$eI!85=Bet zsB8DJMQh811FJ8|9qB?(I)Q-~#ti9njgAYpg=YN7_ZcQZhaNYBBBw~6Jl))E726u=x>GFpPL-p%C$sh@#O zN;P!!Y>zcTl(h2?AC`&MI{I*v-W;aJF8Fg z2KTV+6@xfKG%&}M(H&aWJEV-S->A2FQ5;A~sP?er7FeW4;_OuP*vmAvrby9PXk(Gi znk@w}FY`kJD9`q}F0Ih|VEA!}9VwafO9xl;wLtt_p($`zJ-s_f*8mBcP z(q(phslQ|0S3Y(2Dw)WpJ7s-n_OH7#x*~N+9H@kfl|5G#H`x59*n`iAIa%?9a1~@* zdxx-D-E8imU%N$`3FUnHz=!x$;F_Y1&^9r*_*Mto7F0_K$97Ha; zifkKOSZ-lGI;&FDQF9+^_(Nila;jLGN<6?hs%2=Q*4xmRr6A+3&#uhFP<4Vpp{d== zDufFJt<^Q`0Ra&o1jo~Ms&ZAivMy6&|!jc*VzH`k@ zORA>r^9B}HipZh6V3V82S>&|=Vq8&x^%T*Q0}wKY(4z+1rk<+=70!s`9#mz2g4$3# zz6t)ws#i7E?^XB{s7)8u)D<%DJ9z=f?g%WKer_i(0j?soZWZRLERr9#gmeU}B*rvG z?T-%Obn~M{YUKR2Pzb8)Chdnh`$}aonJ==bQB__b}vnkR*!>=(GmEY0dbjSSm?n*tkf7EE(;xORjj% zxoY%m;E8QFde3yYozu52)eI|i$1S&yNt|s!$66J-1DB7j<}LT^i^!&$V+~PR!q)v! zMQ#>jX~mt&luU(HgeIumK*B?7X^dcjn^!jES8@RRin3G2rlmIUjz`im(VgWLMUh-` zp!*|>2a~kLydrPFDnp36of=66>jwI0z&cULu7&7*#>`gPq8zR*pspphqd$s2biQ8M zJB#dgx$6s`DP-O+u6+|9ChsosEPsdo$3hrA&|+Njs3R266E$(kl>brmUzOQ(w4o$Eo#3 z4F3t2boGpe8AA8o~SK^pSdip5HkJR_fr-iuNnqs73^$G4D`-j4P zLIDE-=kDSZY5CXKyw@n#46$KRm97VPkkAF+V3lwPXA$IQvh<1E=vPxyo@-Rp9~#d1 zT8NU;qs*VRc}x^wXDkGTX(>vUF06APxVR3l+mh)o$Bd+W&hK=ZF8W8fmBV$LV5Jl? 
z2}X>*8K#Fw$Qa5pXFG(#4ALi9fIzn?mpciuf$iwJ1&dOX5eQQrk5Y7pJ7d3JZViAKXqb>}`CyUVSlMS~{n@9|YKHe26Z`FbNS+~o(f0g$dthwe0cK&o&3K4GJB>C694^Ql_ zAtk6It5>>NuPXd2htXkRu7e~=gUtx5S)#zc@=X?;oO9(wELAEj=Yu$vm|aOqEKB*Y z*1p=Q7sZ6g=j)}PIm6yktEs6v+Yx?+2$K}sC7W60q4Sr3X!P$Rd9bJUXCn*mZ(7W< zu+bz(S2o^Q45A*F=Rya#0xabvCPR3rJATZa|L%2w{0s`o&hD;p4#8@J6`?xtwWe1s z1b6$x!r?h|KtkiLMVG-%FzOVqj6IV$K?S49tY~N^m3=xI8nl}UR3fA=DK-CC#r1tN ziJkzMGeP_(`7c%a9SFcP`bvtz=hjUWIgM7-PC!853-@xHo1G!=#7=%u(QH@1L$aSF z-S$NgP^{bAGXz+M+joWK} z;-n*Rokb7l_siW$ul3<)+D{!ok1}|p3L2QXd@J0X)?64^0-A1rvLBKSe0$?Wec~>B zABZQrd}G5VGdCx8Xp39=vLDDR7uO^m)ZjT}8SVL*;SDlHqt>2X!55Xxe9~u`!fDl( z^Gr;s!o*gD@D;*H5+)hwH@)J~{mDC7#V$-OtzNg&DXgY)me)Ko3uxXc5 zxM+~@I|wq8kv*219O?Vefmtc=>s${J=M>=%(i3=_(jZ5Od~Hz=W_Q}zTa$tn{d_Oh zcg-3H$2?NZ(3NO*5jh?y47?nE?rhM(W}$>VZb9E|`vxO_d#5f&VMinJP)gZrfGh07 zjlgw&Ru$tLuzYw@BvTR%3pBT&YJ}qAF#zs4A=VpI_n}mVi(?1qzEchUdU@DK# z)tTkKHSb?emwQ(VX}+Yd?_`<3)7u7B>Qg)2yhf`^mmPK6MU`qI(~fMQgDS z6f56XKfFKCu(Cg&aE6oZ;x({ygrGu?}nQRCsD&1|>u8EZf==%}0k8+U(W6(5L4QA<2R{6!iK$FatYc>ZIijSF8mUs?z@PX>44h0MjU}+i=2vIsNWkJz($4$RwVG6TIPKtDpGSF7ITPIYqeRW-9cXEszpDhchB9z9e9tMRhlQx=`It{>s_zQ`IU#?Ce;MqpO`yTQ z(?hGu3()pzlFnszq$>mFFN29L!?7Eo?fH7cva1oR!Dn-}Qob{`yHl>18KZ`b$D<@a z9Oz|WcE(20qZU8h$>6B~;z00N3sS^^2G#zUiteK&2)J!U--I2Yf=Qhm`L`z}WbP*u z{v0fz4yUr5=4r9FqoO49frz9~q{oi+LhHXJ^H5*SeCjvQ{%_tH)Zn2E;Kug4_3*+4iH0c8LdiiJ#FkAm(E~Ql7$FM{==(AlmSEI1G?f&fjXI$-p&Uy;Ew<*y6+kV@H>?!WaQWZ>CJXf5nNO6dB5+SZE`+^z*TDC~1pcn82b z=Gg$!>T(yt;CR3#1zGaJ#n$D8DrVH%%ZbeKEvw(lpDG_fmdIPC1&lUnyeezk6n?$D zRes%3cAW!3MT*nz7nsM#mQ(g^?uoejLBK(v?`qsbDHvQtkd zV(>{jhpw`$5^ySg67qb&#-b3y+ib2(GORKt(bG?dZv^iX525Ff*xk6`o0pdnF#k1F zyNLH{k~}kNboX_y!o+Y{R30r|zor60$hoJMJe7n8kph2WT&%Iilc2f>O-i3PYIB`o zV?YzvJY})6M8*645%;EAnMwr7_@?j7$0Cgzy-W*Ob%Z$kO}vg%NhF8|MHcLgUQu`UN?G z%n$V5OWXW$iCsr<#0KN?xpH_XCW&l|%E0LgyE&gSVDMKqyN)d+?7CEuAW8-K?B zHz^0PS|t>-q9H2I!~ph18ilW&FrC(h8FbwS$}B;9?z?6~5yol``mR*hj=zHUoK3aM ztIJsmhr$Bt5TbE=?&tYI>E#lIuL;&-dI?#fW4G@4`4ZIh^u+Y&0Iob%OrXz5d6Z9E 
zQ=WEE_V-WXxafNK94^xn3jpnC$gbDN9m_fMC?Fs;WM(56mF-vU>u|H1I`l6&tA9Z} z`3G@w;OZ?P(%SnVs2DiFAENQ|r$#W|(~y>oy2y;<^#+hu(>31Rr4mejI#5X)(()fm z7s?0h8)X*sP6%|S(qq+pKevDpv&b}@ho2hlNNY*@8$`{CYK>DkbIF2kSa#@KZv~rp z8DBvGvpO3~v~sdEzdTyKLI;g)^=0(LkLT^A>lI7nsUA_pWh6MqVx%UG zWy{m0p*pK`<$KsuYg%zBUC-Swp&T|=mLHb>8|_IEp{Zr=Q<@*LkE3YI6HGocz%Df% z>mwCneS|W??r3}3Y3#7mifbG*TK*jYGZq<(GHc-RQdGqb=86DjDwN}EpqQ=0*Y$^1YFk<+^Sr!80l50mUa1+< z09TW>-ppM)L1V6M{vDT$6|Rr*4l3WOe^I+SZ1mpH@zwSb*~@JZ9FRp0-1?wH#0fP~ z!XqPNrraBhA*nH+rpj?Q8MqcDd3m{PMo+S~whj*ASll|>=<2F&GNIYIKU)C+e<;dv zzs2r>7CU0KO8;|D91Y`-C0hXz5&hypjS6j=(;+SPi62=u{ckeN6Mo-!4i3UXLy?w- zfkaMN&>HH<=x9i)JG9!c_hlLO2$z-tGf7cQ_8{HcEM0W(d! zNgNR2js8j;V6}+8@cr4T@%Gmjzc=Wr>6Yt{*OcbderA{A+v5Sn<=J1>tJ?vHkmo)2 zUvkm@GvvR@P6y-+#OFEX=du0+EsJH?X+LB7iWH1SCt{%OzBG-MIsq|ON#L_m56v2L z1}Z}NzQM$(YO7i8)Ad@+A zrAC64MdqQT;R0x*DZc`lfWH%M9Av0)UbDj(DQSW<;4$74a{gdHMn)3(4PK9pX$}GR zsTDk4p405?OzWtU>u|qIeyYO*((;{f2S84&%MD)wv&g8z9t`g(6`9CqV=6(>13PVy zdp0{@a}$|3>j|E2{w$$z5>wnFUgI(CK1Q*K_D{SV2WJ{%D_)(-z7d@$PDi?{EUr7) zP>-E*4CSA=9=nAm->0~DebwtML@OJ(JIGJq7q9{P$x*OK55XZ0IMUZJmIykV(KtRW z*p1%j)naIEU?4|?9S)-IWw*0NC`8BXo>JmOM_=gs0ro+^UHoD|VRCz~{DoDNJBQ=^ zyhNg+)qey&sD4WHGsZu-xsZa}ZfqZb36ZlpD~q_cCP+0_f!iw#;->huoO!du5Ie=&IEJY@7iLOWyKG@l{H{8RxpgA!*h z)U((z-(Jptp<7dA6~(yCzO6oiLF-Fm<2FaZ?MSNTv4pn^Q+Q{84nrG70EoJXke0K$ zXX^3W^|;xJn{Wx_E&}Asq>LsP==tickYdl$VsMcUwr+b6`Fy~gFoOnU=~1dcEctet zhR@F^(SLxDo;6)Ns&0s?hT0KRm=JzyUKCG6uWyT#6w|7UE#*11XDo{o&$M$5deIL5iWnOdD z=9rRtjQH7Jk1!UQUvV~GFX8ol-iQ?qI(pZn;-;qN{8Nu*V0}+cN`p%uOn)N?Gmg5a zRisqqWlk=B&&#PKz`EEdXb%&NX;StpQ!kFZ*vNxmQEZ)Od{XX8vBJ}+wQ>H&!XN`0 zBtQ#skiT=8UbkwE(Gj8;zsat_WX%%l9xcMM?+MpViboU z^$Rst=dw%ysZyG;ABjNxg$LO8P&Nn8H?`>YX_lhs_P7L9?L~;Z2PaY5(%W#(?K#-$ zj}ls$dYu66@dSLcNUQT%;tTG%?3&K^ZmGwIz9g5IVz|PXG0kpGndb#?^SrW$zNkl6 z3_TSijr?RL=GIe4wFBfH|fsF6gDK)jJuS=wDE(=;bL*rXUfqh;+GWudloM&UE7qH)+l{6 z>!;=wL|OWc0te;bW65c_@wrU7ei}pvyt#GDDQ86>Z^GSzKr6f5hUaNJc>W zs+?}S``1#tS{K~h+y$ep+>jJ57lyB~I2d%pAe*d?Xmp3gzdd 
zrFMM$Vx8xAO1|1fRuoGiaa*7&E!!Eb;>oPO(H<#?f&U)H7AbnE@#(6gwCRUhU@iyv z$$!3A<&Ch`>Z9gZx0(tCz$KWkA?=n?`aQ`RZPGmQEcEHl5InZ272S6O=toy&hJqP1 zk)kOgT)yp$s(xN;`Zn|mLnPZchu%@(f+BgV^JDzaO--pL-06(abxR5U@s3-!<{VpL zQsOWi5zq#FqrQUo5qT}M{O;?kIsGCfqbai9;$#M!3-$bca#g!_77gM=KnKQ{s^W^& z#6+5eXzPY?Y*`$Ym+Mj1ZFa*cV2J4<7k96;ASESbdWD8kY;1_0my@zGF(t(00v0Zh z%jd4nPJL)W9d1BI1{r{WM)vXXc{)}2aR>NG|7VN*>c4RYSj~8JY#Uq!FVoJ65UkYx zGwpu(L-Jqd&CVRX1^=$U=q-YqZmgsyIo496YH(aT-+S$ktnZ zY3{AH!u|>J7(NejLu)*k@2w5uW@+QLMj-}9qmARx76{rzx^T>BL_~8!dT>=PtI^+S zVQ9St|3XE2P_fBPwab|d>1qSUfkIaPhH|XYP@x*M-ICn3T+5%C(G2{j9u}}`k(i?Y z9fH0?mLoq5kdGSN87YAekK&JjZ&TmCy#7XaejH;bSdZWR;IWbvepTB)<9nVW(v5NR z6~U+nS_vGU(CWh6jkYxXm`DkOJ@Vjdj4Tn<$BFzg(1wh*^55N_ScaSn!%exoa3G8c zAiOP=v+0M61$jZ-4eZdPF8lr22rWAR=)c|^7x~mUTu|$Zp}C#eEk?~3URECjuj5>e zYK$jmr_XMFPUy#5OiabJm_Ho%FO>kuVkpW#UH1Gk_RwGem{U#u^jMr*WCp0OtmEvf z6b{eIB1Lp(Q)2(#EcS0=>&XZM7bmgcEPWD*?rKUVV7JvJ@BCLra2c%)-{p~rNWx{TE8kS6b zaZm^+?c!$ltBGcRNQ#Vf3PuB}XoV03m^7<>7iT#3C-incM4 zr#?6`0wdvWd|F?gkgOgZ*wDb^ThTh#)=MFC>Nsy}56m@1{RIBYF-f?xv*G$TcZkq5 zC6L{X0a1mOUZf}WIpQO69Ua`(*5#@B*?`Ib*!q~5919~^H=Jn=J#-O^Lzdpk{`*{ zYTZT?FTg%V(U|0z4379<(CJ)4VO9OOO;S^-D9a##Ya+_(r>hCP3WR6azs6W{+$9`v zsDJHkNemb5bDbM?xH|lz%O^w#=pH1QR+O@|5G!rQ{>>Z_tMke(RwQ19n6rt%W4X*s8xc59t+ zE5*Eq0)u5be|Eb%W|x-`c|Ln*-cYGd_`w?xkpVO)x;aBSZY`i*6KPQAr=L0Exdg(t z{7Di(9Td%Gv&9T}V@m4-d%Q03hYq|gYfQe$O?|B0?F#-?UYyngNGPz{vwTg#FSCN+ zmus4HQP1c?LHTAi4Qfm5#rf~qkpWi?-Z`lIo;`^`XG#O$1`WnE9baxv=n45;OBDN^ zi#Ep%4kE3#ss**VT}-`-=X|fMX#IfKU9MzW&6v<#KR&W|UF$u~VO?3tBIV-2p1FRo zTrzoiy+plgQz|B|7gAkfdU7=QWI6X;U);j*Q>DRpAvgQ5$(~aVOdGnHR>vr1;s*x! 
zIY;&DS{i3tDw!Kq1X9zsgW6p5x%Ex%Z&N7&_uL{w3=s8){hA6Vq?lxJs{=2y5NDoa zM%k;=D|py265H9G?>b^O7{GOm;hD#FqjFH5xT!Sogp+W(gCO!`)WQcn0))wlm$Yn* zcR;gaY0H7?^y^1f1|^u0(H*pn+Gc+2R>Agr-}KoUX~J4CLwb=$I$QHL$HBN_wY9aD zDSkf@_}&^aj;qfKxr*`h9U&Ng8+Tp!qQ~UVKK^@%l8>DKsAhP3EL!TTC!ESJ3qfiH zq}JIVS&5XYAlV-&K-#BZV&A1GkCGST+c%YtEk1SZx(?@3W5tZr-n&RGZc_1`+G9TT zIokb}S_`?xs(sHPEzs3A>EEprRL3KJ=06gTcW~tbq|~SJeprHUOl`_0}zI?9|(6C)#du5%?Ne-h}WzkU0vt<6yD#PKsZIVAnm z892OTXH$~2cq~beXS1u!V8{OmzR}2a*d7C&3VI{xxSjPbW02HX@5zj1h*GE)D)bC1 z=ixv*C~ir`vn|gfSVHd)i){iLEs@OUwcxXw3b5KR8I+nZ{5O#O9S(bOZ{zel4HEDp z1O_BMN_uP4=TEqKuMhvZ<5P!Wdi@`@usUN@GaXgm30UkARQMVCFD>KXhfV3m4I{9{T%hzXzc7q0~FuM4$nIo-Ky$_S3kyw7PBE3-;Fb9 z4LktR>2urgp%pCqUlIzsZSnsw%+H})mv-(CPlT(__q3~T`?NwY_ZZ}UQjZ->N9o^3 zHB|^l4X^65Xk4zEx;`gFCipeIZoCNuBb#GJ(Sr!0c0C8Wri4Uy?i8!8cIP?gHmtrl zJi6@abwuXYa^&wFQ1eiuU1_yQGdD-3+&Z5MS7VeV%k><$>{R>Ad!KH!6FfKBba!Ux z*gL=3ravDODP#$R>Iw1~jXvQ+Yi;>4DWD-i$bW%skWTU2Rs4|s{xv?Ss2?#Pv*^w( zh_5j}Ct^wyBxfX)8`$R_t@fKFc=FkWi6vbQ2wGX(9@Qgp@ZK<8)(=T^3Wba)`N@H{ zqiTH8*>buVd6nFyOFLUquj|sum`nwjk&jm9K~^z7Ijdyvy4G`9{YjZX~g=6{X+o1oXsA!C3-^% zhrp&E9NxJ_Bw8*A9L%CDcAgET?G$e@ImDZj33}V>r`LvHwq6GT_h%Hg8ySMv;Rm5^ z-OoF)Ta_t7waBJ1XFf#2DgBoO-~Ese6fI6S=?#GB0oot^TW zZDRD-$)>^v!7Em7Q3TbzFL2}wXIr-O1eK;0IEQnH!tD)HD^i3wom^R6EKd(7ABMl7 zxIO{lVkX!fC2d6w4HwuYFJ$7Lg<_2Q1d-6#;~}=0S9DCSq&Nuj17Ceelem|2!I0ZW z;R9LIzXYY*86<;{#X=NlTgdzX=hfb8%X>BvE{Y;7)_x6S!p-4Kt?kw64yVHhxOBlz zQbNUwp2w@lVlT?Ym$vQ7;e=ewk6XGGFJeyC>c}1M7b}~y2~ivAG;Pw(OmY+*VYv3-m6^teEmHWMMClyg zqvS=ZpuyqhF?*Mc;>R@L)`Qfq%)u7R-$qb#uz(Wc@FLlmzaHLvQ_rf3;KaOzCoqW3 zyGPZ4ZpT_KIpcxUukvZ89X$?r5(xgB62nG@^rkd|@X$z(Z^}9L1qQ8w$t;EjJobY> zB;X1W0P6+6PvMLphoAWFOL{1@qF6N5Is>G2I24MEyVLD07%|B&>^!xZoedxD;BAHqa zVtY*#+00M)KG>!73VgeLF4@_9|G{^@o&)sdN7T41k-oKObRRpkMEeR-qU3#Lz*JTG zk_t26mIifw#3UE>3^tq0+?w8@ZFYMLfEh%OpqRcwPG~x@LhP^D_3YcDfg{yO!k^X* zDFzR)o9NBEtXj0jlmepXyqma-jh>q#uJd@QFCAWGsaB-Ps5+;urH-&^T83kkY50Gd znqRNRt+-6dOKo)%VR!KG4%olT_r?GmUr*2&$4ApN%f?Wa3E7`u_A@}Sm3=~kSZ$1- 
zK?4I~^al|w&a4WFZVYAUsJXvfl9R7mNvPtAD)0uoxCxV0nMr=SV>}o-71*HGF|K3g z(NT0FgP^ptN3;AnXzf7{jaN#E5C5Y6IK$J&kCgX*ERoXwn=e(x@A#AVH$)N5siO=K z{XB#ytr#Y}KhCe74#aCZpKOy%a`TmH^C#l9gtc;K%%nK&!#q1nJ~xnAreb7PFOG>qfjq&)FQC(-ttax`r!wOGo?^DBhqbL zcZ#w&(3wGC(i1{fE=x8#XuNB-zn5LLYW*N)@bi6s;tD_mR}ZXr4B_ZARNvz`GvXSz*m! zlPJ1mJ%k~MY_DC65Bx?hmw;;v8y`P}-)a9H_z%wrI;)o6Y7JxvtJC0>Zb{7F>ZW$U z<@0Bl|Ki2R&$Tf;jJfz%zAq0uabG@+*bB3><1{oh)N%q%tX#k~x?Veyu=Q`IKsKb5 zpM^z1)8vL?oc@2;sg#1L))D<|uI}QxHhbLc><{LxxI~@_tw@>bkdiXE3laHX7em;+Nd@@9FDqRf3)G zBg)3xgK(zLGpbvr$k*`gw2*TE z=f`e>-N>31~ixfAZ8dB9RC)(D!9q+7?$9o64qiS5lKjx}W zzm7ceRol@{q>F|HN&Pk2i?m4Lw4vOealfI(91NuItFherZXK%~-S^D1Gj`?E?7j`d zk|`7%AJ&8hZa11>lOx+_b@cXgi@rDrRI5XVZ0sXK>wJ;!r4&BT#NwM1uTzLJxcr3O zVYQzgz#c8WSjF+x6sSQq#k}hcuM8!^w~SsA?*KMC6mlfTQ6)hgHO4Xk*@cZ@b|^H* z?FW5ph$pr2snNrv5!f4Hjh03Ht>+)R)suO7oOWbO-B-iUSZ2YV$JuFV0$+6L*SK-E zOJY82a6j;u0{L7FJ)f9q(N#oTsk{D{-_-1O$KWxNMYO2FyZ&tlVRIWXqS-#;fB(3q zholfYK(C%|V!>{-%NN1EuOmdHR~YuJT!ILAZSqIYc)hrVBx*6&`M60f!XcNS#ZFs3=wgD z=+8ChqpRtmY#6m*uX@Ss9m30+e{cbq%*B2>|6R%!$k){ky2s18+ijNnmYTWKqqRTG z>4hv<1mffr5>Z*#%Cj+zr7z?oVvX`#)*t)Zo)f=wRW4K~6K~a_4kWiC>2sa>{zU3Y z%nUZ1BDUqp*i`gs^|SyhHPe{JVD&E3h*!I;@trPn;XF6W_q^K7+M5?41jkXNFwXS@ zx2iDJFoNxV;CNN-gyd?IDd$tP4Sn_C^YZ4O@M}Pl!m1d&R&!#vUnN4gC42X`dfguV z9J9^ew)*M>$tmkUOmbjmny9nhKk(r(7Ook^p%%Z`{P6pX5C$lE1_o1(F*ruSbHQ9{ zX<-=CM(%WKVvan6i8R~T5Q|$zUe>EPZRwq?Wu?KYm_gux5K>WX4*xB)4cO2OLsb@X zcx>@SRRobZ@S+B_>{Oi8xDbJYuoX&B%? z4GOT;u?3cu;UnE&4KB=6|L}C6aopjh%G$E$fH#`ZqXV4hnq$uq09JklJ;rIfi8{)r zxJn|RqHLD6^1bWaZ;b^iVb35SHwl1{AJUwC6A_~6aVIMz#!6^!Mt4@(?u7sb;xOj?Ztw6MF`|>5<8Ls!hQYt z&;)SLKx!>#S`XhvJNfDDTR{{CD2sv2a-~GBbcx;S(R@M~wp-}3lNGj;X`&JV?G$$e zVQ%rHHf-Wjl0ZP^&!Fw^6SC;hyis`EF;&+2Lq<{g0E$rV4=Us*9Fg7EnM5)^ZzIvK z668L}(Y&*hLv@0PYzh!fQrVU1m7i|zh{RscJMv|I?}!wDDpHKyg`n?21cbvzlIP>B#`#Wr!kKM4eL72todvioj=E-Vz~I|L;;{ z5xz4@XD-XoR*k>UQk^Dw`|Gz(|LXa6#go!Dtxx;SzT9ZM#x7AyuG+=6PVx2~;Wi~l znuehWp|Va#-U6EQqyCzjinCW%&OY>aTLPoHO(`~cfU+H_4#Sn3rYzcSQ9b*}6~ly? 
znAlQT`~=7mVK1!UFIGi*G32rD{>e@mV)&ChH}D?1*D`Bi5IFc=qA>r)`jIi)ng!mj zF+GWvu~4;GUQ$^B&ZM0W*y0T(;PrC3`o15jES!e)9Gf@BDSS1w4fO9t57cTHV@p-l zmZi#wUs64&(dLW&1puxAjk?76rZZ@DJkKX|Dc#y3zCfOLpzlB|5PJj;`Fm48Y$Xe2;#atVum&xUW3gx*&avhsclGrk@24ucY&a243l^of+IM5(ImH?nL z1EzJT6NZ7+-@&N{^=3G4w3t5@Y2=z8*3nCp`qQFWW?}>%UmVUv0n`Vx!v+4S3Qw4r zvVhWm&<85|Rl~(+~LMFO@7X)8#&^2hbq?r&*ixNF~_`KGeBr9I4 zJ2O=`qS2cM@s&Y`rEOc#Yh?(n#+NKfC9HyPao3TSC|xC;OLpFsr7nYU8g_YaA_4B6 z)KNYEqE#d90}XinrmZCKTF!g8iJS%pQi?G0>SJM1V?^};9-c=pNOAAw&hKyj8EU5Le8!b8WswnH$82#x#8a5E5iZG@TIwCkQzekDunA>DS`(Pn zl&}zxp9bA`tL=>9uu`&0(h1w3I%ByJr;Q^o(eQWr z{-;~{Sk62w%YV|KL)h&NMN76t*e6v*X3jYzJcKMY z7tk(t=XaXSs~^Wn2{~*>>Gl>yNeMq%ul`>6AkjIntf+xF_$_24@vwCTi32L>$cY17 z$x}}tz0ND zam`d2^NCU14ef){y#tD0PQ|4zTiEb-s#^wAibbiSW18HXb6Z;*mi5CKTjyL-4L45k&aS+&7Cpb-OonLV8xi-{X10aIC+Amf-{x8e z0FC@DdyL7v4;f2 zUNGEMJWo(uY@6o;7;2Z|P-PD&xvi_yr9gQ-xC1H9^%K=suzCqPwxnt>Bd%9`J{eDc zdgOVvFcUoWj?5gWwUw>tqj9;AQdK>CwpQsFVK#Y4?ju;+M7yj%jom;yERJh;&)z`{ zg{WQc(cDgB_hE+luzf#3l5AWcUa9lIpj16nweR}7Aw=n9?@{jR^OnST=MnGftt2(0 zO#_tTWINgh2+DhV#ZW0Oc`w^;Fb!0g03OND20<1{F?nv94fPCVrR@?QmL(oKeSvi0 zzJWny!;&W@qOP!3)QPDHzbK9Ny-FVb-Ezr>0G9{o{xZNu1@ee&9A9!qKD<|qj_D{h zIDp=LPwkAAy*ZUv(ruHGXd;xw1V?D+K z2)W02K9Al=T~-4b-JFK9mNGG`*jPqY15tD(z4w>_Z#=WQk=!>2R^ig3%zgR`(iiT6 z#uA~u)0IYFIWu6=KtA%ef$r;PFqpxb=QGWh^zRW3r+72*94UXe|B-#2Ht)kqn6H7% zraZB)h+oS?+S(J0kyS7zGOXv4+VXX}+K{;PxzuM>P+8^td&J+w)1R=0bXfWA$SRnC z{*sW&6$A^h^L*;oAfjma*eNL{ot+()t}2@#Ih>~N8wuzL(^TTf1DK`*NP;8Kb5Ad4G~&H4<{xsANd;HH|1$sQcT>y@Wk7mn!(RabwrfFdelH) z^VQ2+|F3#<@X>-t%R+bj&_BBpWj;rB8i};7q&&{t4`BR+|5XF_T;U;CN6Xh+3_f=5 ziD_#l%yk;2O9&^Q_w*@Ql+p39q{k9`kI2xLC*$w_(@C*nTU#$-D1oE}lJ}ju0M|{3 zW;!)R;T1YdJA$Dl(Hxvr}{ER=u>CMO546#A6WxINi+7yee7kE)}lXtS%<;vuRoUM9kFYyk9Idb zeh>f}RFkZ~QSfDrbdBFJo%RS@6XE$jP4SW_F9RIt8ch`QXmBCFI5|^DUl`b0nIf*5 zh=&RZQBh9>-mEU_+k7`SNDxS6x2F!os|*yt{|%TtgrR+dqZ#w>O5LsrmJMY6SAtyl zRYOe`pU5yw*P6Wg5+0*F(U1;#pmCJANx#aA=9Z(x_RL{wUzt*PI=Jc=i`DM>IA!>y zKb)X7>e0T=kztDkJ{07E=C-}rc^fife#163)%b^E900H{e_Bs!AY;YF#l^U;Ki5na 
zBge+Zc0nM5zkU0b-+#S3^SeJuH2dNU-oJ?El4G|>xeG?ia^{5Asl^GMZfTb#m4quhjL5%*Xj6Npy5%u zdt$lUZJSKCZD(77lHZFCCl_n-04cN$@+O~am6jFN zLDQjsvdZJx55J1bs+jAs(3HEn@Jcb+2&T2?kZ!tMZ;;Q`dE#%P0n&C|=v2KE^}s%9 zM`w08V*T{-T!RnerZ!u9aE7Bqofi)}{Gbh%B~n)Vfrl-aarp*=-+G zX9>`813`f*)x zJ0Q1POXs}!qy1@uy8-*H#cWZ6k((ux9q`W2kmUz=NyUS2RmYy6Zx{0q*i`6ClV#h< z*=Tu74Nv*^!|KvyaOO};t3U7;S$&Q;$Pwb*Ypur6kwK6QOT|!+A%0`sPFA3)28v~j zmUIg?U1LG#>71l6h<)>@c~!TjJVOPf zQTvcghgn8~Dm1iWA4g_JXEqDBqHiBapp)L7dqOL>iIUN&W}8FU`$c_23nO^@j)i7p z3nWt3@k5wW&`5Pf&Mm-vkZ&LRSoOALs6mEo+Uh+!VON;S3!VZ-rmW~ z+k&4sjxvrF!o%SOkz%OmF*Ex@{Blnp=+NZHBp>FAH;!l2;CB_1;YC`~5>kon(>?0 zV&6%ov9v>A{7SRVBVdIQZ&_UalIHidT!QA?>#wPNH`&qYG9QVPbC`gMC$;PE5@!vb-m=6MWm==~++d~bnQ9y2rZ(ULqT>r=m=t1H8? z*9qCVE3!YIBacd(D73UQ*#OY&`20!i%$@rvJSRmuX)Qj+)zt{PE zkCpuOLi0cLwIp)5I%<8x8qQI!{XO`chU$%yY{FVrZi7M4^2{Ha*CT$6MZcc#QuTW1 z<>c3y294bC(JgAoE9;8!ZzBDB?b3Fs5;pQyWFxU1?BtoANMt2lEoem37BBxS^HcjW zPG+_GC81vw`O4NLsSH_L#ual#pQYCEe35qdEq8-aBL6@^$`#%42Ri7fH4nn1Zp&12 z_qp00-v);9rmWsTNhME}bg~==ENt-YL1vDDZE+28O zBo$&W1P!k&qzt^!6(pf85j7eM-%5mKIRQ(cO>1tVmVp+473TIsB*e^wGt7n(U+YkS-a1Y zl{DW85l@Y6#f-oG{ijDo#gih|j-@_oS{K6WMJN2<3y};-8%xEVfQMQyO`PwnFplpy zIgZd#qiT`h_>vp>RO=%u81XdHxQ8rdbwypskWCQF?YTWoY~1P>h38u83Kp&p9BsrA zd=IAeAiRp+nh+<|Zi|cWv*3z@Hs`jbW>vp?v2-FuU0qNTlSYfnWk1sAWjl}AO<{a4 z8FW2Mwj}LMZ(ikTfbR2>W8dG+34CedU(EFpw}0$XWIWV@)>+<7!ux-68vtH!Ywh$; zx}0HVLCus`b!RG+dBs5OaTo?e4P?d5EG$s}POSa#%19&%gD`CJ0xa*8bVqB6hd-3U zy*iFshsMXL(N?UZT{ACh;^&QwhXw~p+r#nA{`|n;xRw}Vz_Vf{ovc^5X&jg9NFM5d z>sW8{HEW(>(e)??;DQH<8wd_AkqFmmF-T!fr|c(%@=f>);6ICRDYbxv%1R=q`->v7 zi@SO2?s34)uW(lH`SdmY!uy$u$h%AzHAsxSI|6~ppTADKLtidVUCLV7*dT%U zW($^t)QFsmyZfDW7EXwtu&^-pb#2<&&A*dQxEE0KM~&zg&Ho2`Q+0pSB}Wz+6-6Uj zIsGL7h>#s~Qd`Nqw|XV^zt6RfBk)(>Qk?0SX5RIXCN(MHZnvIHdjH+vH~D)pa=5Ts ze<8Pgt3wuX&s7V&gzh6#oA;jFeN9up&(;kb^x~LHX#Tz_)tL^opfacA#Rl9Qvo!rj zq_{ofZScM&(GuMDOHRw&F{gdP#Fkxg2mAZPo+|x&sPw7$Z70$Ov3;X%^wbMy6V2AK zywpEcX)Sy1Y#h?7O1XOQ?N`TG zzOISNQH*21X5l+10W%2QIq+??)b~sTLTVJc<&gJ<#*JaPT5xq@{Z);NAKOPUfsT!D 
zaTteDz7&f2pkxLG#Co4oicAf*{BEwjb{A>P6dFrZLn%k#hre5avu54sMk8)#%u&Fih0!dyGn)nDr{LAcW*$?3+Xuy%z5|+>gJ80C;Qdaa79t&gvV&d^ z#`v~arav>l|1|d+YwBq z-CNa^xIjUE3F(SYA$l4$p5h%)HU~0y(U1D#9^Rg#A8ou?>YVpsjt1hW)5=f>eaF-N zV&0T_H=YmX^Dpa6ne!faUz2@0NaMGs7k(uB!3{Y?uO_MYR##`bdpIXIXH*MZP@qb{LH3%lh;T| zV;}c?8mr0^N7ebZv<%bxc*c zyzY*5&tk4%k0#1IgL3s$b-SmAns`qgr4$FtINfJwNo^3)yA-86LyYGXsl%DsGVkBd zwzvI(>=zbhEG`htfFQn(xLU%zTSG#)a7R)HsVcxY(VwY0A}U5*YGAxb^^+hB$e}%R zF!rw)nNW2j=l9FZ3KP_OJ@i};-*6_$*5OBV72va;aK_c3<)3=Dn{E7#KmTWSSePd> zc(|eR8?}^lVy3D-uf)Y(P%w5Q;&MsIvV%><6Q_M8+ae9y)koy9J=+Lp$QRH+i#Nk# zDs?YU_PdADQPsp|eI6)DCqtT$HRGj6c8u-Q4)3VE+@vil1kQ6`6H2UHy~#ZK^^V%p zO87ysll&O4Tx!DLV<6`K#yDMj0viY#$(euJ?9uHl-&_9*_&85T+NKGO<(!dy$Sxke z)UV>$*zxQTl4xpOO4&Zie@Dsox3TNl=E5YhI_6o6MMrPO8y+^O*o`9AvorwCwhop9RgAmYpxgKJc z4>EYF+F=$qU8^|Y3sVoKUNG1+E~PW_OZaE7KV8r8=~T*e6k&Se#Xt}|a%etwVzdL>NeUP&<-kG$Y;#HFD zSJUQOkl=z}G|ymqw)_~E)BmVyN5i>UD2B*Sv)10{ItozoAZ>J&QkZ89aq ziNA46RuqB1b=J1&_ED@wU0=LNOnzd+=Awoo>po9PhdWEB(Pk}_kQ@KU&HLpt`j*7y5|^{*qTd+LH>^7& zZEdOLQ@OFt!m47E6OCJsSDKERUN%~I@^llq7=4ao4-%@6&++Mc8ExSSG@1>l-{XZc z(l89w;7^N)v4jM}6n7smWk)Q#qfu4r?7;Ttdz*kou^=Suvc`q0Lz(??TD+Y}*9-Be z>@3MD!>|gjq};!leY?JH@$dp?uE!+DWok@u^F8iqJ!CmKGr#>(YHFa^ z(bIM*PfGNRbdy5DnC>za3Wl<~gktfcKcod8XNRBO{NzFki^S)b~={yN8@~;qlEcRK#%L@;RZw$hZBZ+R6S@ z3N*=SQR*@%>U|$_U;&b@5>(1vGKb@v$dq*MZl0w_2E*EWnP%R^=$3Sp<~WcJPK({%>ZXIfhH1O!(&eA1qt z&jb@#kB-kl zg`}V$B{em5R@iYEIWykP$;nAbV`HOB;Qf`!ogu$rMNm!-_toh(r$sS?pb6i>W7V&D zd3n6M9QWa3#jxTC(akk7&+HRdH8mnvSJw^)-RZAd|L574D^PE_d?)hMKlqAgaQO0u zp7nE*Nn>ko_2N>pm)z2PINLF&bVR7HfdF>+_96VZG2G(T^jh{tP;?>Ye&CKC3hK5n z=zC#^)3w;>%RAROXfi4(>l>V;s)NTT;OzbB+Jn-iqya~LT5?6YVRv$1b*=zcqArl< zqqtyq6@7WBe$erOp3+ zU6?92e~K<)4IzsexHPQ%mT%tIBGFYYjblS#woNBkn7AENNg8I}L&sK_Dhig?*X|4p zg+o8&)8@t;gm~~igpbaAHxRM`hh2LG>&WSQYZ;Trbn+}=*0`?FrbNwm{xG2oo-Xno z^zbbNIyZ~9L~KgIB?j8y63O2t8d8Oo+FRg7piUZ-uYfHhI1OxPN| z8~I_?7;5vHp)L%aC^bbwdlLhzT7geNT|Z*~Q zha`!-wu=e~m^U2x8lO5oG#IIL?d$1?eIZb^l|%;Z(uqpkxcU>~V>eu5xOJFCyvX|V 
zXlugo^ki>_>26?$$YngtM#saw!k%EX%Cv15J23jApfIm>e$we7R+Bs zAGKox$d3;bp;OeoHW%Ghm9*3i{twUb2U;^_P7oIN3MsNRL^hOyUDKQzE@=0=EerMT zctHDF6KLWHI`%P=tpzdY6n1XI9Wc&^PxwshserzQ(Ag#3tpj0j>il*4z&OK%!tGE~ zwVLU4jSZ6?~84lY-{>cIg zJ`?C6bHcJ5nb-XJI=;SJGoBxVSuVEmWC(u8vzS?qWA96sme_8X`SxbraeDTG(>#rLxcfFylD^w_*!od&91 z)A`KnmkXV+i1w0)s~1dFk7u6z(;CS}#5PMreI+eKl0_@30Yh%CU(IebMzRp~+uu`hP)6#xp2e zrsUMFy|r)iQ(BwP>gXd3mZ>(9x?UPvdV-K(du)`~s6jB$yf`Xx4w_WlfD=UJJ1|Oz zNV;A*l~RSQd^++MA(w~XBI;BaO~vOqJsG;^t+)D)ykmko6&mECJ1MsEd1t8+R^n*L z;x#qcjqB|1Qg%&%6`S!UE0>T6S~jX9U-7p@BYreME_FN>u>>%9U1UD=RF|C4cn$0j z`rdR6O!V131;{Ns_+Innt00Tz|JF`AY)82DQH%FMhXLL()xhtPrdw9iK+e_`n^WxD zt)5pRW!Z_rF8=($rs;*ge(2wqXrqls+pc4iuZcy)#a0(%gW0U zh@si{B9DJyKqYBnkxZ@Mtnk~+jXIZV_)PPw8nD(w-#*Ir|8=kl$bU*$Ya<`sUni_* z{*_gIotK$Z1@qg0ArEqhH+TJ|R02Hz`?pAx3)8%4by|LE7$}%pwHa0OzF0XWP>|kA zS@MyE;EC^;x(b-Va;7I0V#~j+JNst1|zuF z*0dlDIw@uiEp)jd5r;)$iz9`@W1Z_E7((Ye~$`r9Uud zjB=Godl!atps}j9TjS{#oA}o!Y`B39cXJJon5w%wjPuJKR9y=VT9QC#=VVXY^E=m@J|=iOmJriAV1fO^$|x_SLUkZN2UmWj5c(o?S09Jm&ftk#8*NL#;9Ikr>3pLlfxm( zNvqDKyjKyC0dmpjDCS5}drtKV8>K!Mp4y2}R|g>5Dhf13G132sP*%jmT%Pp3-yU@u zsJe-WYJD)a)GQ2YLu4%gqZ1LRpv4OmsK)Oe zvRR<65tO_|p55e1AtpV3Y|s;#{WGkI+QsJz(DG@g&^L(>u`KTA#U8eogj$Ce;yExL zJwHm67wm@WM}c>FPHRMm;ehhY*pCETV~404P3p^{!N?2vkmf5;`>4*(xFzVBJ{H{M z4gx-zVxBCaLtF4(g?wBstsVGVvg#LD;(b3+^bT3qZ`3>uTnlgf8b5Kq#r`CR12@S% zp>HwhFgg)+O_x$95yu3zjhu#lUgHC;$~aK%O2z`UrdH%|t7{%nPnk4XePsr+5Oo0ayua+n(W4D zzK(lt(P5&n6`)vjHezxE$MOpbZZ{fVM;zKEi=9$Z@9Zuwa~yChdRtu&+TTbmu^Hc7 zVKyhzN=i!dgZrHVthgHfj|`VwcAHl*7!2E8CiOgQYV9pqSef-&Ty2kGQ(8g0PzG*X zemj-qx?8_k3^ZN6dy2-PD_Aqo|(RO{(gRj_ATp(nQ=EIe7QHes3azFYmyx`um_r!o!RW*byO< zs|A+FhS^i5%Bd63_$xP=EE0h((#7cuF}ybCt5a#@Q)TXa6N2C<+*#1_PLF?$kAlrfK#eI=!&p-yWyK&wfAF5dDHZJkr6hoL=S9fYgRh8Wrw43r|eoPfey*xpHYVanMch+IvQ* z$uz>kc_U@s=EBgIz8v!yUJtiz_5=?P($V6+hkE_MeWSGe-Ftus`+uDzRPX4l&2KMX*1UsDVs=g8FT3>4**9#@QFK5Z33bjP1`=fk@J}Aro41!O6U{xP_x0@Wx?B1n zK~qPSM+sx!FznPoq`5Co0sf)^52W3{M4|kkOenv5q4cxCI!^Z@DL`F?W&}JGqbS`UV`PEsnVXyA(2iC?%SA#kdcHFQura@< 
zy!l^5`ie4oDv|v&ig7>sU(pKZSBi#3Wl|e5gR^piu?=^2vulf4n})`^7~NS`V>v0}hXKtb0Y!ZU<1(4o^m6Z)p~-5F_{?KbAl$C} zNmOxVE&oO4j|l&W>|?l{(e%7o5~a%CJiC&xNE1s$;%sXc0b(sDIzpluG@KyzEEl*s zj3!qIA~Rt~;|_=oKVjFf2r!)671{z~ni)hXHWyJ{D+Y@UE{RB_kjGJdu{h89%yo}k zwu_=;F=y9$_@6LW!m#(Af;JhYvcihz-1(-r5)dqGJgBdGcQh!q%qcus*!Dd5jOJUXvGJU;U}#DfN$kV_+*+g9^* zbO)&nu$G^Xz0oJhfA>lcZSM&h{mRn12euABj{C2AXeuIU#$I>zlF>h+tp2j|7qFU- zmA#xco_nFOinwvl3F?fl6!pIPK@3pce_(oT=dK?d)P97gA`c@U*?AI_|Eel{L`trg zo)Z?On#`^J6o!uU6noi2lt>+Te@Y>Vb^x$>SSB6Z( z`bq)VQ$aBd7k$`@H530s_pU6@7#1K22kK88xLujN8+{ zf7gXq?D?t<^vSF z1$@u^I4)bhbQ?7Qu$E+#09EgJ!NbET`@KtD^BIn!#aqbAs#TjYU!1D$3FcFF;>L$R zqA2)!6e=<<1M!~EvVu)I$Cp=1jUpx`5nd z;M7|EN96^vxvmmdg&@H+7}vvNDVSAaZ{-I@u>Qz3Q9V-QSP2XMVcuk>cxwFnX|J`Jy#9ru$jl~R|a;lZi z>A^TH8ck7JcGKM=q&kXb7FNJA32irQS&qSai_ta&Ax44L#aEvDY((U33o`XCS7YWz z^*#h&s3*T@mQddvlXREM`M)m0>-ux9<9PEFI35TZ-8TAK^L4ZFD%@RMg0OF4^T1AM z(XZ(~{_ZGTjwqae^6kNNIS&B^$q|Mhtw$Y!9Va-q@pKb}~P z|8>=fUGe)rx&Qmzp92sC=JX~ipPQ9$YIQfjsc^Kc8diNnE)`hax|qegB%njTD>r#) z7X_0G@7#N6t#%7qY`%Uz#uspZ;Ba$vtABZwtZ(acOeadoq&qzn5!FSQpPy%UtN2y) z=`$x~e41&^*rbnJHADbChA{m6y~6J<<*6o1CePx;(9dTYE_^P`+H7-3Ab_^g2XqzJ zdpuUC=Ih8b?k)8ih3qXpW#b8%SeiQcCQ;+~*s~}F<%m>WQaXCBv?vwxd)5YV`<}=t zWg1z^+fC(pBI0Bs`4Lz)tX#Hg0d>I?2ZpGYN?MH7vf2BT-I#t-oKm}1_QIrnB=@U% z*asI?)H$XFm1$gR9VRC>!2A&(?qjx)FJK_kC|DKkp@PaLHmQycI(DcOi@4a1JV|CS z8{CkbYO*E~6J(D0R*q9@wT-uDB#irOgQELKW+T+~E$xpmlj6kH70n*EJ)&For|r10#Sd&h|DA z0;$g?t>&?0Y6w;AtL%{AI-rv_W;(=%pp_r%3Pi`I8T$mx(eV$r4QGIkWwY9%N?g7@ zh}rBgg_szqv_d5Gt?#Rf@`@l#v?y6o?Hi2fYpvNhFr7weq62`$o)9VzGL)vR5E4-$ z1?pKHt+?a?kVEBKi$RNpR;FB)ZhK(0ZK#~zKW@W{OPr(~ekFdmhK6EdBD1`nB7M=g zq5R@A@~R0(018OxzGqF@ZDNeE?4;{Z-cxlNQMc@5Re9ba%M}_r^p&NnRgE)8^+5Rv ze|df4H$N|)*!Hzbq%zCue1+z2d&n1>rf@oiUBK956mFp6uf%m$#G+s%EU^Ntc1)(9 zV6Xm~rS|vWwJ34?;+}e*eHZE8x3=|yOF5Kei|LvXiq2d2mZA{sCh0zA?J!yljX*L} ze=ujU5Oh|V6Ge9{sKA|QkH?Ja4T6@EgU2pt`9QEqvUq3Su2@AY4nT#7xEK+|tSw!h zvtqQdmXeL&-{;4J)rjC12r5U)AN%rc{wcpH?CQEBm#=^Pw)f;+B+7Lo$$}HEZuQ)P 
zoByo=afcjvLd4li_7#aHsTKAWb%fW&gVocLAj}8phMDHwL#!@Wi5pyo*@WHO%`4I& zz>ZB{ns_$o5Jerxf7gryy5|c>J)6H+91fp<*I?y;O>W|UMsE@erfdGUA)R#36q7EN z+0;Z(IljkUtOPd}1@NaY^k4C(bj7fWEG^?oY8DrPS`u5GIsLsST++LIo_;td(;7^R zOUpVw#vL2eBPK-XSv|Jr?X}Zn+FZsJ84jSded@_8K7?U?(R{V@@+4KM=N^V+u)tb6IElYFA+&i>D`r4qc2Rj_Dy4xqg<2X z8Pt~&_4rGWd_jl+g0c~d|BTfs&Gelg*sxt%8{I$LxIc6)mrRn1yW$@8EJmd8*(a3! zl8}}$q1kVDTJwE77T!_3a{VAoN&{=-Zx&?PJN`iq_eDj%aSCg6O2s1%?5K4Y16N$( zv2RO4iq-@j4d{d%rf#7bISR&}VFj|75AR{cSY5gmvNY2Ldjj!uzyE#5JZkEuut1lg zzXib@dwVVOTv1bpCIsF38oJX)`jVm?3Rvc0Eo3CMvaohj-Mqw(ix-&{9co;|Y|j}X zoc!zvNHj@t^m>SUij4_W5F#rWuSTc!68v_kj+NftjEIhoNCSy;Ee?H3CG@+K6 zi~^eeTZTs=Z#C^L#9{SO%)M*7j1NXnRyf+b?VsBHq#nAY=66%_Fh{`3Tu(lywZq0F z)naiiup{4Yiwq*hfI$n7*9?YhQmoZCJc2<)#Bx6fOJIEDc2m)2e#7|a4KROp6vj}% zC864C+VXxcaV%CWWLsCfdyMVb{WvqXbVx0Ik`yE_t$@%4`~yT(Mw<+kKC*gs19Ofu>Y#xZcm1*< zG2`=Mk+Twq;Vv0sN2QO*>HyYsU=tRM!Rm>nefyJxkFXqPi{P*C3LF%R(% zc6Cr#>1+aGedKXne8?0VQ-ouB@OlARyee5}dJNCisiz7jY*o}g`EcSoSeVN<&0uEfX#w#jB=4#yt zd34g0%*@!|6dDBEjpjzqG?IN}BoYSSy*u-jOL!3WeZKmhc^268#LUbX`V4AQm(0re zo=05&Id~uT2?PS+fS*9}hKGm$%6tRlvg5^_v$L}r_wCqx`2m4j(|c&->e4?#W?`Fw zj(;bcx&Y5(@!kJX*Z{R{fAQDxVlx?v4N%!sf7q0mtnLEx+U+&l(a9j7FmKoI4=xwC zh2>10$A~P%bpLx+&bB4Nwzb%>xEju8vbry>bG+Bd2^=C+gX)55GHuyBl!)d!n%dCL zM*_c;&sdvo~p8aMB$u{ zoCQorn=|q$QZ)p57e(#+Cxlc1w-N_NlBitetg3`5JoVQ@v(=CCQ<@}3FamjA zV4`nuw>Rx$UHx>7>j*7Q#|NRd)w@&JD_qNXHYg6G@HwU602EfuwW?F!(U8RKrZSgF zDE!}&;gKN_MkjOtp<8~|D%w2_l$v=4Gyco`&JRK1r_k21B&`j>t^&OErA((dY9oYU4cOtNN|hym|t^)QY7^MN*Ub*TaKvnFAV6hsI6tyRHI1 zm6?xkTZ+5=xpl9xQBgnGj-qDC6~4lPjug-;G8Xn zR_v1aOV4PZ_>e{I+sX7+k&+ck$(JGV9SfIv$WpHEDg zx4p`?xFp7!ED|`guqR zPv|~ak!BSS-H00I<#YIb5VipB)a~;u4+4^c5iiGNxm3#PCGdu;y^vR(WX8(-OD>N| zk{rmp=NA;=hw+op1(NBr{fKM~BdB+4YEQ&k4dNSe<<=+S7-MPR=TF!_TKgv$R-d?f zFyie_#;PAxYK_$pUK}V$b2#+xcP9m$pnWgH`ga-x6H{#Z@+J7Og`VQC$|i!38~nc(g$qVEGtYmw)xZT!S@$|%c;O5 zHP?Jng1suY+^+iKMtH%W{q|8L@-UDN*XnBTAB2wgC&$f|_>237^MCoVmwxmByCsk9 z7%l7G{O{mhgU^Y|)|tNP@-~veTfyf%3VG4o3M`gh80$qvUH{7$;EiP$m}GIvSA;8u 
zxZuk9&v32e04+jq^odGuj?A-NnDzDbGK!0tv~<#j37aSm1&_XkFRm!|9Vxfej&r>aiE+QaNZC@y`cQ??h7U%XtebV5S z?gOJXG7K0?-!*kD>Lp#oReMj{V99>|a~{`44+UXTt!hEcLOkK)G;qd0Ni=u8BT zD^NwfNJqDBlig79Lg|IZzzRev&zA-|psggUxXBmRXcV3(U;I?aaEvp4hfw&mzyI~K zRjy@-bt3N>+zaNMU=Om%?Fewce6Bk|mXgzC6qsmUfz?)l4n}ZDN0wWxz4DaQ*!-+@ z`{Qy)ZHJ8$-A}Rl^^?nwhbqG!PafoDw^YXeo5JVvl-JOmBF1^a(;bR*TOr!n6Yl2&Jo#q1(q^uDOOQ3U)Q(V6W2~h2MA)OF;EsGZ^YOjjbA>CAf z<%%n;r2(RGqFaXdQYee&74`RLuMWCr)#i8vA~OLeIxknN1t^9d2;;r9O=4Hnd=IG4 z%JUYU3>ws9dr#g-ptw6Z#ROPkT*RQb6*X8PSSb!H#q;p_QsQbs9L?$%Dz@;_qep#H z9~e!VKX#-egAu~tnvLNn&f7@Arf1C5J%xm{aaCCh&KJdK7lh}>1RGN_$X0l5d0ylu zHtvi~jHFK;jSidM3UPh=He%h|9@qtTuvb3^@(Z1@ocdAD)-b9&eCR`BGrkPW>rH)} zE-%q{+3Wgz)>+rYL?Ga(73+&>$)1vR^~G$p^Lk@|>FVv&cwfes zz?6>Fz}vIQzeRt}OGYja&lyg=Bq!T8F6{O7C*^$Sk0^yM!|_(TqO${kv(Zo=-c&dJ zf0Eqe*=CK^X5YnzTTo1TW83Iv$x+-CZeaUZZ{YAJ2gyKyk+ySipdE|A^t z25yc&WU%sdO0igdp~~)ebxKMDZ|6eX!rehzo5cZ zqmku&uj2Bpt0e?caUssxR$v1dL^)apjW%Blc5lRJC9ls)fEB@qUsZMFt$< z7Oo!dxilACRO9W!tBoh1IuwPO5|YDu$3WIqY?V#b;Pb$fpCvj~Bb?KF{1IeUk|6=@ z)Y6(-j_~e&Iw!^aF~XtSEqLJW(|}+v9K-;B`6hCOY3Y3CVF&F`FA&0E^Gj$8}%9FN4yK z5U}9zyisQ?AS15i?`z+hEN`bHL$+o03A>RT{~61&IqG}+9?V%2h}P|+-e&ul-8_3o z_7|#vRUdS#37wn7gI~Whn=#|%OCDo%u#qrtO{7Q^8N!Lfd2iQh4vx<0I))9?H7s;@ z9FA=|rn@_)`}=x-uIqDsuJ1qK2bXg^p7*%jQHmr9AaL2bMv~Y54ps?CQm+GOwL%E+ zy+1qQ++B_LhDx=Z_>55yXKeme$8*hjJ37!IDk0YR!NN!HTKwB+onTu_-3K27!M!5D z1{?Yo!qFmx4#dI*V_+-WKV!q{BtnnEG<^iG#lBft@&p-AMH~3QuFofBm+5bJ9Exzs zitI-J-l83zn{@TbMqdi|KKId0Alms5nHp3L>l2B1pBoi4Z@*@_0tPR(u8?s`$yzI& zCfl-P(r?~;1-|XPj>;Co1_cu|D|$Xpka8}CSQ4bU7$zk1+jrS)_=*sK2$hb*c6PR~ z?v|FSc6V`7D_loCcb=)=21IN$T6-cd@3Sx)3x*l7D@AcEHqaa_Tw5vA z+xC0?KFw13N@P;Yt8^Zx=8?)JiKZmwYmNY-Dju4w(*Y)iIUe+bue3b)+bVoC|gp_5=mI!{>b z#z}iM9q+W`&8@zI*oMv#zLW&2Xka{gLUJ~s8j1-LG#SpS}T*SPE=De+0K7k)bguMZB-`FI6)H4Y?Ep==AYImwL ze?Vx8Q|ZA8WQ*8pri4P?HO>mQM_2geX{DIP`h=^<>f~%zNv0g>bV7{b3rPw(a!tj> zkj!zB18=+0Y?-^)<9-aH^~cN*`l?$2{R7cBabY4R(SOg^5SL>e1Bu@)q zc%l#?WPInd1ZwP%r&YGVr$?Y-gdSUB8u~O1aCj>XS6@Az)4qI+StDYbk{&0&d-Q+O 
z+<&1w$PjE=X-JA#V~`5$@;L^9LXw0nIw^*&qmZU1O}n%FB(e_?y`zoU%mwjB1tmX0 zi3bqZGDHzpqVM%eSca6Ve*`j`fGM{nN=o9#t2bJ@W{gbVe{lSXNWC5_iuS!P#(`b@ zeaSOK^}WeL*=D<_rtahIr^1Sc6sZK-9r+uQT&ds~wj}x$D|rGMM@e9=Re5lwNrZ>) z3b=v{`c|sI7oQ`24Lunu^Q4jcRGKFnvILnlKXy!2wY5{2eaP$!{snW#9lgL4G5(bL za;@ygj>Js$;7P;xI&8hozL&q$pNZR2+X+p+jd6Ze?B`9my=_-m)ArUB=#Y5R+kf@I z@B-H;qk&5M;~lmV$aS2uV=B3S6?x{L4h*Qg@bfYUM|1VPKdX2DSr=1~ChjYGE1I+= zN@}d2u>C7y#t7*5nf2Wm#R9$y+yb6w2O5MW0iX!~%o4W#*q1vC_}{pp9qtVMOsRB8 zM^cnwd+h9PU{v1FD3H@zOqFo`U{bjSj!nacCORoC%p7<7@8b2C*b=gOY)m<&IzS|D zm@W6ng%nruy|)CeLvG*LjAJax8a*#)@Xgn=Vo%Nz=aFGcOE8WRSV>2TQB*swVT%U2 zupO+8`F-F_+d`Mqv@h}Es~r9|2)}m5jI)LHVXoYGNfoWGYFSfo3O^MUX4tj(O;OkEsB1 zTB>O!J+}AMoLjdD-05mgc4`%NcI7u3a&m>_id2A#xvIk`#B}>yq7+Y0P zaM#vOzJ9AMWP8`?h`Xa_WW>IfP92;h89>?H-EG+JFK9QI5!M}!g@?uU)Kp;84`*_{ zVsO0oA$(d1Tb--19jFSpa?KWl;R2)dYcr;2R|F63XU9u3vG++rQM*7uJEFY293_#P zn;Rtb9<()*$HUa^XQHE%;@ftugaTYmW7pLy{k8xoyQgpD=wW2J|0bH!-`}5UEogf< zhY58>x|zG_|GSwUwgn!A*XP|j5NA}CrhD3F;q8>xIL=J65x28>Dq$C4Xrc5Oj$HfqP38c{G{V;NzS_NwLx1ET+Ki(g z4?Af8-Sa{ox~{MC`y_3_d_vsRKs4GE(mg_s7v+OoEw6=cj*9h~F2rZCd0TB4uFi;d z)8REMzkX)BY-GGOf4P+LzWt3gZq z1iFK`Q$!o2(8A~Z*oKhdIQJy`o*I(dL@Z^Vv$FpaYvgEeXMYA)N@EXu$L3|eQwSBE z+KDj9iO+BhVFuVTIu>0e{D60|mG1Uptou2Ue8LHTcgAB$hD4cUn#Mr_tk&OZRYXY* zWC@2p9`ZVd(wW%@BwX?#-F*OCtthgf{V!x(oou}gUs3Hs!25bhKKt*N?|?ZU8NZsVSt0pgj;QbNotL6Xqv(pKES*2b{T)K|Bn-;&?1 z?knv3J9U*tW13m0Hu*RbgzSm;E|R0Vx{M`VUX|_x022I!zlsa6P-hcgQOEB3A41?m z0ZY3D(!+$zf*e_XL)yjhpTF@yDDZyzyegjOg#dC1wglU zbWBC6<#z6?H${AW*(H~iqp2n&uJ(8$`Re@KdRp?%He+zA_!7S z2>0y5K0jfIt5^b(Oj}s3=1Wxvt2n9L6_A}BG%gc1hdHmvWO+?P@vBI6B7+Q{BUFBP z(a{={Om$e0Fvx3%P@a)(m3yk*J0e%z9yIF{R*;D9H8(FeFleki(nE-Os6yEQDRdME z6sBre=rh25!yzkITS6OE&$(q>Od%miRedC*%XDoGNtaFNQe1~ ziOVBT%l}-qSdmUf=N&dXNQIFxW{xS*!bn#N(__K>RJ@3kmbahT+7CgSPC=r22X%Eq2Q=quxOA? 
zdYL1qthE@crkiS4Mozo+AKvlC9VvG;0hKOm__vZrrZ{7@a10r{nW0fpsf5cSXUlDm zgwUDRjp@eE$ovkg01_xOC|WZZ4T)}$hx{Yi@4k(cWr1)K&WC^6kylO>`vB0Aw4`SU zj(2$F4L|rpHT9F;{iDi~YC{G^DDe4BBf?oyXyaXP4=h z=~J;!7%v}Hc1rVtKOxqxyegzu=7A#1A&_Reoj*PS&YQgUz*wcEtdl07`m$D1((mxI zQdZ9q^}ob*ehDtop3`Z*wg=_e+g1FE${(^WdenPPue8nlERJX@$cdX%WIg3`%8X1^ z*8i)rX-{%<%X3|nmvd5dHAS6YYonV>gv6C5F|7P9dMzONQ_039(tS0J@BFJLJI9vt zwg76tj{!U_i{vDGBBzrrTe77#|0p&h=hLzdG@|`i4hKwo=}He1-LRzQN2s|!K5osL zqKl2W!KW-vGv+*{zd@^zj&#PNb=wKi>HNB%*w6f^fBjaIbJd`E6NjhlhxUBi{Xjro ztJ(Jpro>0Bru3W-caxnpg&3Qrsb7X$zjJDKt&xhQiIl2+69QNhwW&>z ztValmZUvj#z6WT7FVq$|n8!`CL~<{6IVY;6r?~9Wp=kdcO`-HE+P{F$)!?XMT-ER! zI4K0EnlqP96!yJEc_)UYukl)d?^}`x9b=<< z;>$>~EKGy1+;FCB2*$K{Ch+O=#^}0E z%q`7t?ZFBw%-}-1Q+rb2wIDrow1!3DD-#5wtZjN)SE_roqHx*~)%NWKQ8xonr53io z9IQCPP=88wBT`N~pK7gF9P%GJW7*L_cqbe-ybul9cY#1QUb{|Gh+VxYURo@{Z_)lg z22*T|tzmXXMDK=Xl?b~i>gmBtxTZ{YOyO2=3L@hw9@qlR&uC1wrl#*l;%ITQq;>(_ zL!jxVT`9sUgn5bsjY&#UG&LnBb)X@kiyq5*!i1y^_!$$oXfELPC;u4pyT(1IuLXM= zxXQSna39QdkkC$*cg5M_@dj+YQ_B^>EK(wnmzL)7^>4zZg0f)+u&2!aLdm4rb5LZ+ z&I{=tvbJo4KDmJe3%iqTzArDyX~z;pCPNkn{RaK{X-GY>vn{%R6bwICt4I*@k%Og)9b9t07Vv&Qj!D^FIqem59MgiF&Vltd_Z6B*VB_w z@cm3*2ml;eo!h>;9$}F0J6l>@q;sCGps3yiuEmt&^8brR{TG$Gten0U$9N3eFtBLv z_F3-e1CaWwn$n(X=WbzZ0OaHzRXXUJM!G$OtArugH_~lcU}fIm!uM*3Eb!txW<7zQ zvSxfJ=N9I>YT@KRQD4q|H{O*n0_{J)^G08PeM}~61POEeYbU3gm*`Yvo>Yx)IsN5X z_RzyQYSU(4G+~~+-kbFR;+7y$!eise)NWdLCSOUy_vR7dCY0+Nx%DEXP2>T>!wNek zqBHGIXbn8nab^Qb*2kH#A}xND!w2Rc&bkMn(2-YQLva%Et?Sj^l3kHz8*Xn-`iPHu zHc;1NF)*QdSmF<))y?~mpV>=9`*v6T6`yU~3)vTAawt?^8Y%A<7;{h)-yb>bhO!sgGq>hJ`0MR&OfRs!SQMkgZijohwQB0r=+reDrr40Ba|v!E_iK=JKL26}!+Ucf zH1R38(hBl7_98D?|0QSF@6kvR5-|q0OP;PTi1pKuNsa_H!(4U^OO zE&6FJ95f@ibBA07woB#;n$8)S06QF1et|z4H)(94LyMlqXgn%0&86=+uMc!smfu(~ z4As}4ek~zae88g6yD!jl*5MFv{!7YbQZKVFR6c6_RWNhnJbtPi1L8W{^5PseX=!T} z1E=G(obCooNlB8_c%rTyV={{}f!LGpKK>bAD|=9m7mb+QL02xmh$WGgJD`WRcHmUA zRH*)pDbm25f6)cKA3Cfv`%uk?s`Nr;@W}dE@#b8wwt*0HoYfIm1=}Dja`r~`VFnuL zl2v9qcw{l_QlZBgHl1N28`IW-zM3w)eiRAA?rh4vh5o#-Q!rhMiQzCI4z?9fY^AMc 
zdhO3v=r$^1D*pwZ=N>U*(3EI5|C_72k*kNpha1G@w)8_nUhr)wL&$w75rY)AJuqIe zyqfhN*fMsXLK=YkCT(1VXVFXdOt5J>+-gm7RB)$f_i4_k->)#{O1Sn+lxm*jS8GfP z1RGjJV--hV)cf9-kr2S}6K5CMn>}Y0Ib}CUNn94S^y2IRv5Goz`8>v^6sYFuhP{Hf zRs0|wy*>>$79-3VDDs!cht3d~X6Lbjk(}PRo|0?-);NKxi-couygkuL_N1W!Jh~W* z1vO#dpO*5&tjZwqXSy0iV4Ev1k8zJ0piUNiFN~>(9BynOf9AK_!yecPQR+=3Cts33 zYJUq{T8wP*$X^3;!LCs;Wm|@u0h8&BT+@4ib%d#YBYL2tA@lk>zUjW9g4+ZveR)J) zPgKc*3U``c=^g)beHml#tEG`5XJ(4Aw$MHRS@7F2Ta&atf%@M#3{tfzh6F1O9a+QU zwle$T44szJ%yo{j-fYjUj{EMs<;^tHF%T=^Zu|7Yq z<2Sgq+P|;9#WAE?qsG(j9|5*o;sGajpABYTUiTVnh2DLP+1`2f`6I})Q_*g z$gx-*0vm5Ceop;K2z=eP!a2Ju$k7a~UzMK|DADQgrReG;XMBpm@J3^M@jQ1+@m^q; z+0@RGGi_6QQot7LpHeh5yT|rFbvFT6{2+`^AlF!*5s5DbNZ`B(DLxiCXwI!N8GrWjuj~k}bb%2N&X=C7V1g!0*QIG2`d0#z;J@jV$+3LnS2y$4~+p{L#eY{u&#P=>H z2Fu&qo3g?>iqMqoVt}WUyen+c8EkMDKpS!?;eW22uncUaB*AT^rKRy1q`+is8ok+~ zo)PGuay$ZE4^V8II8%Rr{H%DK*mq8?W5LtK*UI==efDt)2}#ZQ*Ll(MWEygfVCePC zE~;!^;%nub)$4qR^RWKKoa4XkH=t4SjK#K;OT73=*E+bFP*7e0Gwwk7!C9Jh^$HE% zu+5O?{={p8wA>y4ij9Lq;x5h4uiN$T^y&IIq!c-#6zrNS>iN3q{N~|uI_X7cp17!J z>{SpvX8X$S|JYFfaiSVR0KJR({sbW&__Qbt7^u&{kLN?TJQs~~_Cb#K(hv9a$05Gq z3`=f-APVj5Gt*Crz(#MzW69f3K6i)!QEHM7MFj+A><`@W3By9ASKMN$A0s zlI+QYXXH1Fozxy5PX!#z7iTd^IP%tkEJvo|@jvVdSB@_Dn*#PpxpO5W@Yt@_e0L}h zmK!ko(G2jk+CmT#;Mo>zHZ+6PUln~b^&SEZT;RBs^Gr^-L`7KvKZ;9Ug-6Hu!w zod~+wh$g%M5>HX`bfSu$$G^-__d{Rsf&f9*_O<2w{i8^eALXU%mqat#5=MtJFi`SC z{b7G=JcJ9eRoYuarr%hjEV45@rcp^=l{lm-8BqTRZmSs!Md*>8#2V|XwwFFlKS{D zEs=&q2{=1Z(C`53Icy&_hB2(7TUbO$r#UIa{2m7s&@_)2P@(IPoW$UypNW^P7P`-# zbod?y1=yGcA>s)qll{+y&CZX@qeDBQdaM?f)fpa4>v>1bXyx0yc0jio5;JI?uSG5Q zES;-I+=7%y9Ki@j6MjMYbT!gk|r5S=a&Hy1s1 zC+ggt3Q#z{_q0Z{0*C@D83y$0^UK~&MI$>NhO#Bb!1|f#$ z|02HVG8l?A7fPzDw_dPbg!%ahXTV@U_qH>fZ0L%zYG$gB&c6Y!#orhXccGe_q7$2f zdx$Kkh{O@R2N&2cQX8icjyf%)W=Ox3+-bE5KOspm$w@x%dT8H#pI{-Fm* znTkW!bdu^0drq?Rzli5bSV#GUqtg@v98v@}@(mFYlfxLVFkd@-{gE`~@3FgIMDXo1 zKnma4DST1xY~%3K66!1|X)~ z5s0~Um$dL^(PfBgo@V(4xI(Dh^79#4tt5Uh4km6_Vp1E^wLIMtbO!UOFP z0RVOqj?&BYz-rTK+i$Ebvq6+-@XWMD0tL=RDs8GS4Y3*Ty?H16+!t-VwfaZ>7TVj; 
zl{Ai?eny?LbIwx4sctvmp5&0jWYCaf(aGL{1bb-y>j)k&GAslO^0?ZL5w{7QY&&|; zln#pM>%70dJqX`>Y6+5>+O`QcP7AGGFuDf@(3aancoYG?oqu#AKoR~gV|p`S;QQ}@ zonG>Nugy3Di@;aQg1Oms00g9>ej}oNA2-;|I|5_3N4_10`%Ad~>?hm1i@5ZAtW#+U zr`_k!*3OLn{)Y+Bxe96`wg#Fg-FXuY5=xL_ZNaB%JG-gTE7y)u$ODOrms~P1MG|`9 zw*|Y}bo2ts8dlyop7$P)lHOlSigwI#G9Gy1CVN6 z)-KL|Y5v}|>-D?HFKEQIHeXg@Mpg}=#-A)y^`@zXH5idMUzqe;p0lZc*A3($FajTE zlZIz=vYP{}NI%mF5e}S#tq1G`{2TI>{O66qS22v!dym$NS#+3yXu9Iinuw^Do$@_ ztOcG1ylBMzxQaC*abi)Wq!7ZSI8yF)Wm55-cQv5izVVh{%7ZhoXXXPTwD?bH_tU1m z3qd}_pL3&2N@&{rhc95e_4jQ6&Z63ZPIFfBM@(X{F&EcU6zFnVe`Cxhm&vj-v_SKF zWR{`KT0f~kniIBa*SU?P6{(WWnaI6O*zN>tS9bwAK;F*hNOF8^cb)5e;k{^Nn6WdvA@udvq(gYVgSzZJnMn_bR<~aJutHuno0GqzqtEj> z5%d*;1C|NheAAupgjdtJOnUFnwnsJt9v>2u`d!s4e%zQ`{qux!X+(MB|Dg7Uv|V0a zz6NIHkZ?MVu77b99951Jtf*xg*&>X46?_M8R8(YTWl=kBwrFpXfg`H8$rjTA3>|u& z!JK{*O&l_Erd_Z#l!f^93p+>Lhw|g6kR6EUmf9(EbbG*6obfX6dGLU=)3uLFV4jYy zE)<~sp8|Ndpq6WGtp2bkvAyeQ>zJP!PnI=}jZ_tfKw3T?`}E=N3eU&a+q=QtnIq)j zRC?lCGW}S%RP^NTh4e8sdB|hNsXdd@$Fn4+>!42)^#30z3OMsV15B6T4bWcR?lK7) zWymeuWc=7A?je}6>KZ}hcCuykti$yc;w(D5_)FaPw*6m8xN<7@B;>Xh^4dpc8}D8- zF&=RrnMy{|QV}xVuK{t|MFpsV%9Mn^!I$_agNwqI8pb;`yyj|`$MvErEK$dxvn zBwUj4!JbmP>YHi~clTZNieQ+BFZ}XsN_VHw7QR~jGxk@c?u*$kRNw@|Xy88SpNd2$ zsc|Jmn^u<{Mb2U`*5iv$t0VqUOVvrDOv7D^f|3|n_c)K1q~gLEw zLIOStdJkXp92Wif2nxnlf!e#D^>BlYCCIgN=GSb`q*u0|Zl$mF4e-8BAZ5H?WmGu6 z*wXas6A5v~hY9x(k7oqyXMk2hf6E70ehN`aI9kT3?f12O#BfU>a_+4Zt*OCMm};IpV))~WR^+7Gl&uji$pqz|TZ#cLAwW=X#&eG({}NoJ+cV=UI}uLvgv(EkRBIQErE)r%PKl@phtz zFmtP6lnJVC6MhL+alx~}Gs!SAM)Zv1yUg#(_se7{=&m=6zGF^^Wk0m%Mz!%wK|z*i zxj82Y9lV;Fh>nv|(IhH|y{I3ZBGFi*Y-w@m+wT*~oGygzWnQO*Rmb5zj<&qWtapsm z2kgYxW!vYLE|aqh8zN%Kc|h3JD|%;3TA-zd*+bjxw(r~jt}GtOLK`Bt4u(R z-TK=6mx#G2CrD+L5iN>U>4@yYZ~gcHO*~4}E>rgX{h^sMaUrkoE16Rb)Og!EFf$lQ z3ZZNnzKCm#%S4!Rh+J}uhf)8jnMk{|`uOL8n)1xBE@wSs{rHH4`<*wLlEo5>=^&rj zaRzOx!l6xtJlHRCNw?zPJ)y#wcZDa|$2mH*55MYWfs}eD#a9&0qNsUX;7L$o^G|T} zbmPdH-CQhUII&?&BHjwx7W37vl(#kZ=J7z0R2+UhDpi-ag9z%A@&Ixii<@`-`!|_# 
z5i^o>K{i-D6-T{ex>yjavu{aVzPm>cP5w|eN$vmuqKm;^t&~g(BX(q)$`7vMkgC&} zgt$hh-#xKH>>4XD5&($3;r)8jb+YrbiOZ0UqJv|%IHGnOs=;T~k@44uu14HdK^K?4x zeeat{l4zWYcXqHYNFl*f3iAvF9VPg-ilpS5Vu(b+O^19Kk<-9wRjzZgRL_3;$myDB zYX)tfl%@kX>i4FpXGT%JV$hn7u>r7_G5u*fe6a>!ZnoSwC7qk;tEz~`ZLrYZj~j_I z$NT@n>-iNmAK!l1t3gA}8fY?y`k}rW_y^6qwE1Xe0<(}=zt3j*M7b=l_ks&>EQ?=a z8SV0+@`;*@33K3lafFgiZnSip4rCO#d)Jgz52TnP@!e7*iqfVxw~I#u<3a~*ukbLi zxqf{;#Q$)n`;3$Ow`$>;ima7uAqwJ!A1Q>JL#@bJm9)aroJUcyfAkuB$LT|zilM;9 zOPuzcH{3*2APwE7!f$Z?Y?EXPKcTiMidX#w3TK}#$E9smmiig>Z~TO>ExpfX0R$$^ z0Di)(-e713-7jkPl%~oVjW=I6F^rnZR5|xV2AID!4?~@-S;ifWm6Pbu*TGUeQP=el4PwPn(;<_f*N5s*0ZX*D*dG>1dg?Z2LT41r=Hj^OpwhcLK z+`dbMn+uPZ{Yx?aZ>~X0xC6EGb0Fb00ig$tlq0_;#;nAJL2;hk8uV%p&9mHct|Sx)(?5`a;-QY5V%!KrICp2@DGMPT zc2DjK-Ao5ea_Xmf+*j2ttF46JAYpHoC;~bRX$DP_zY9eH1~aoph`~t=%yXs4xA8Yb zo!1@@jpzSu#ThmNk*y}4TxG$?XKfS%fo2E9X%(7-cs{&{d1HSKDl252%FgCEx@$%I z%KBkgfjp-5!Om(1pC6cVI4_c7fXaFzN)LOko#O852>Fc+%YAbV*b?!kJgxTWP0~La zb57PJyB7S1^B?%%xB1D!DGkR*`?Q+0I$AlUx3f0z)^ zi=wl#Xm78Md1NU1O*w!a6`JrjkFpp~pbtf#L|c5$wuUl<-RNH@(!9vNzz?U!EUG@>vK^!T;KWm&c>U$2Any6LH5)6t|Lx zwGR$$(hui^3`^}j``dbDLrJ~iYYGb^Es^&TWG~~SquR2>lNmY~*h%h4eAvjX3@%rZ zH|WLTO+h!HwTisgvO1rX+tfxruFh?e0lZo4WGXoG>80&Eq!N>`Uex)HxNbDnZIYM-{Z^atM~NaA)J2$0AI&nY|7LFt{1w>nDP$ z0}I8|6801IQg5x7<4$8FJF&ig(QZojrV{2k>YW|!!!{Sj{9)H(g9r?HpS>Or<8QC> z8?$_GEPzao1%R@J^?MPWyd#-2nYnj677 z=QgIS1YU;FsH8VA-*^)PGZ!)ZSd`YwZI3ZK?_#L%6P`w%x`pRxnuL)=!g6Ug$5Tcb zqTj_|`+l+3a+Ab&AM1H;AXzP8m5&aPrL2l81xu3L3xeBu=awlS?pgye6EwS>$C!%-C2oqX_1n18po3q8_~lt-6Jcq3UCw@F|4& zaN%E0b!oY&C;a={zk!0Mw&w;7IfXRh&TXS?Sl-I<~> zIlme&$5|LAGA|UK3iCt_ySoy~5B5LV`BOdlsxX%qVJ&U%Vq6;4VK%usWklC5J|1Xa z0sDzbb*H~`n2&axD+bzeh2sFX<7S&I_M8B9wD(9?^@@>5bCOvve(Ve3YBX{N!rZX% zunFY}D2p#EJZg|~UgJPlX59}smr#f4jT6q4(t)%4zxmnbGPA&cCm!!-1c>BW6BLCz zWvdSNqc6F0AwC}AiarL0qR9?UYddJn(e#j`1GF<>s_t+zQo}Xa-D|rA2~+_W(2X15 zqgA{3E?L~8^@q509GUip?_D)(?p|W{(bSoT(;?GHCG`0lDNYQ-7k>lvOiT5ensBQC z=L|z*#6l0)Wqwb7L<`IRHRh6|=|5CBNhwt{wqJ 
zosMHTPnK{$LzZBFIu|u%V^mt&EBy1x0!=>G(r7}ud7Fcm3@wcuBn-`1H7AQPMDtwD zF|!l_0azIL9kpaQK0eHXugKY1qWWu0#1#i?L#rp1nnK+UDG9A+)iLV?C?Gzq?x;!w zHYGMdq@p=KgJmrf@qrkzlF?saOP|zO>x=Kb#~O7kFDIfTcT9R@6E%!`<#v9NL(19F z5)SWkT{nxdi&5*h#^O(6Q&gM~T`KDS^cn7tB;RVjNDwIxTnu^+DP6KBVdmOlt6B%Jp z<{V8yu4%nRDw!0khPUF)3L^2;2Wg8VN{W8Gf8O9}nPwyo))}rw*g62Iql(sn{hEnp zR~{wF+LQq^#>R16=Uk*{YU%+bnTk@@vu>DiYn$|BB98dJqZ7UgDzT}IlGLFG1 zC`(_TIa+@~^)L#i;VQ*#JS}ZBCm~)__l)hQomjp)d8|&;4(Fhr@9U1pQ~QU7(Afmqz4{mNLqwFB_V=!lvf2QtAV3EL zihSC?cVphX0aD~Uz2pEy{bLIE^U6@907dmUSRO##zMTMO5mEg%^V3A(@p#EM0ZOW`Ct|&Q14S0gKcTh** zvYjd!4t`Ic!ilb4upLA(B#e-DB`X9bpF*}sDN$}_*)0Eu+l3nIxCmHlIEI~T>$tqn zKciH*Uyk$UPJCkiuZtp+yX_Rt5+QkS6FuA@ulf{1+t8}Lb!9aR(DrVPVAXNU@E+Mz` zR4VIvIZ;6AX^${eRsS(>uS~4P~zW`ONW;`KbfBZKSWy`kWd;=54y% z8NvXA!TyLGJXIli2l2UQjeaM2`uaAxGhsb@hEw&M&;+i%eeDH|Fs;@+n5aCvmybfn zBK(zflJ~cXQb4?npN~)BT)<)*_}Pjqg3~c}+n3894)JG(1_v=hfnAqbB+2Ui%;9XU zlE?-?PzX7lIq?h_&6oYR^grRL;{_nLxoh7Rd`I_(CXcx~pR!R$H8KTvcX@lHvnQJ2 zX29ovO}>VZi_7h0pOA3?HWd_%9FY!=_itsmg+SV&mJ-EJNu9R_6#Z9Zi>Mh zB%hEuiWLWF?}Uj~E_cgCd@Lz6FeJm+OZYvjq1+2|uv zZVw0= z`aEv@H+p59} zID4)!w8DkAg3bg^TdKc$@TfRbWr;$WbOb*Ww=Wdg!g!Oy^Zfn~Wy>S=jC7^+yTzf? 
zmd|ey*UkibEVtrTPu5~cPK$(#MkMi7187o7Q)V5g-GR2O5}=R3TJHosvoaZ`ckeYS z9b|F<92_hu6<=&;*+z_hpTnEq$Ots>qM~i^t|6eKZ#WEWFkc^ousr@&-@mJhX9o&| z?)>Ck-epPbn0{G3@#{bC9IP`cxnJMxTD_M>SEGFgrw-@h?T%*TEx)qF{u#}i>z}bw z9?t%74*7hme#+mgz<{3MZP1S`Op&nnLspj{!E3tP(2Wdm0K2t>gAz_4y%CSzUS|9R}=wK!~`W7rnu#)faZI1UbKK2JPjVgPySn1J3;-#sLkwo|6oP4`muAY*H z(pT;<_rVHEOCg}EE$gXAVeFci_PreArT(qJ9M%<2Tk z3(h8F#C0V_T4!uF$9bCqU~Ht+pykEbCUJ3xKdy#RT`BzO%w^OdV0|KHulS_BZV3gF?aPNMZ7?OA6xpm3X4h1TWgx9S13fRpYc_brhGVQI9LW z)|_Vt?T8go5U(&xV~zV0P!N1>^Z^mb1!m)NqFvsEv4};1(yc^Q#w-pHa9hkFG@3I% zRYFHcNsAHlmhFP`$7Z6_)w|vnh}ac+e>kt}1P*Ef)=#m-&2A<4vsXc^3iSC`hlok( zx7n(Hs`VT|YjC1+lQlpM<_UD0qtv{5-*;DO9;->Cc$cW`gaZlzY(1mpBZeFIJ5_bw zv|`QBYo+`1Gb}MI_;J3U-{RU8gUE;5fVPJt@xU`l_0G~=pJhO($n3E5IR!kv2SAmQ za*;E0p=jcUs{8Rey@_`){?PJ=PvU!J9|8bS1!_ejZ=I^!3tGr8{$4qD5MO*wNo|6s z?f-b=xEwh7X|w2CH6>4;boT@h?J<3Ux?Y0iE9c~I7_CG!Gi3Ve(ngrCHSCA#alv;M zB{dG!qk=jJ`imRKUu4ODPJ4_@5yPu@%>X8WvZ}sD1P_38b_if4{_XQMwFrx5@qQ&8 ztQGz}PL9OS%uhNAX=7>nAv4bU6Xhb==~&;h*(M9M`6H_5JZuEi-wtv&cWl3uHFNA8 zm3SVS;k0-xnql_eE1{s<=JG*?@37;)-nIdl^yDlGp5gYU=Q&6~+LThKCN`ifExcuh zXpm1Axzkg5fMIEBGB%8U?_nKfWI!hGiwGR02fttz#%WztPev6`GM%_)2dL#M9n~*6{*X(*TA4W!^B>#;{;v({0nBfr zKc`JVN4{&|--U-e=U)im+kQl&lrB@Kk1623!HyUGAjWQ{gEq~kAG%g7puk%EQHLbw z6Ry!4F&DP4#j$gJ_>s?EQfgRX_JSq3{_@*hbF38s9HSLIkq>@35^|Z&B=Ny4)Q1e3 zZ!}h4?qRU7An|^Sz%y5&q)B~UMrggHB99pHkq)_5&MClArm5U;@aC6L!Z70F6+$GW#4y`w31P}RJRav@hoAm_MqFR7AhY^+i5s^mmU^2*N@UTpv^H$^^L99+vkRC3PC8 z`M`&@-#)s0W6f6Po@8lU42Omzw1OOmOXj@jjU%UMj6RBkBA2y>;7Z0$TlPDaKRv{B zQ2EK+X7{5EUvueH2Kf&?9%F6fY+TX3n!c~NI84XyZ|x1vj$`{C0wxO;Jsp9tN~YPL1D2z?`X6P2&dMR9-$ogn^On)g;rZK_$1Z^uPEJk` zuN|$`xDAo_-W$9MRxgpznFDh>`&W6iN>?6W@3(J1#rzu@8KJWL!pNc+FLw%(s) zws-IbUV3GUS^OWQp+13%yEp`rNQ-4*Vd1quMF8a2*ZK(#Cf{NAOAB2q(V)NQrsx{s z2qtiUaFvo04|>wuM-90%N26T48*X#%X7V# z8a{3Y%S+n+AvZKOjvHaDSRJZA0K7(kNY#3}%qQLUL~@~NzV2;I?QN=tYzwl=s5^@q zEkq}2ddbl{F}?gU2l?M^_FuN$Z8Z2M=W2Ob>UiMneo-RC;1N|i@QPfMhEFTalOnQ) z>=!nk;{An8ui0$m%$Z5DhYJy3(eU%VE!hQ;W{UL7%AVOE4k(|)3yp}M{e3pFO&*UZ 
z71@M}2j!~Dw!~l<`j_g)WI9i>XYAT92Q-z~OBE^Y3+^8!$*F9ax!7^3Yan|86b4By z{mCw#?(g>1OmWSU!~uZkaHN*0cr$J3)lcMUzAbRvwY!7eEcnpk`~N$<;D$oo)Z*!c z{olI!nu7g>xw9AGD_pwO)-7yX2|ICq%&j<+L2^YMhidcw+LtY+Q--ylG9w{AuLpzcT|U^q$cr5+ohi%4r%^i0;73-tC$0M8hcmXc#=XAhhrQ zF?E(fZMIRj#!K+xE~U5^_ZDyQ;w~xfP^`GSwZ);hYjAf+i+gYj#kIJf=l#wc$?wcF znM@}4y4PO&TH!!!-D!$igqV1;i8~^^m27r&H21?uMuSm*Db+0Yl2kzzPs4H}J=shh zqJDk5t_J29zmSJM^4J{zlvG?{e*BKB?ut?ozkX@c>%f*ceY?`LpR<+9ijNRfWO$18 ze139di6vmLtr4TiwM|aJLc@|@BcWXnYRuf{5Rl{fZ zp76Qjb73~5^4e}c-SB{wZP^Pft;IP2Y+c`flY!z-_Lgk`e$ID4-X3@5w@-qhH?KG`A-9~3Og_zhIl?hcyj z2xSVRWy(T#E=;ZXqz`m~mf^uCNn_k~0uPNDej_V+zIB=|%cY^(TVm0~8*~y^B+&vg zg$tM4Uz1(=J3Lo^?|+MN#mW15({x23k^b0whR!@;mAI`3u;al6s9#XIyIjJ|?x+t) zKEZ1XzKMR1uasFe6=eCZa)MrLwf-*`GWZ#~_<$?9wTgSWDgC1Ga0XB44j$~j=|8S% zTqjpVj3N8{5}D)gE5Cao-W4E!|LgfSuI_ROVU8{#OyKpAj&t76ZjSI4^||;~tbx>KcVd zG~y>#b>*;aI6HXL*^)hL5f@|}PNw|b`rttJOQKqSrhps~wMgOMmDkcNKwCICzg4Fy zAHGo6%V4`^;TkV?`De3Y{+eTKeCq*qKYyGhL@~US=n|3rfMAAa1oIANP{ur_D2E+4 zrKS~H9FEapT@Jk{Mgx*u#A+Gv+z~}r^$+HzE4n%#%cU5M=wa-fEvYIq>^}+r1|8x6 zwRWHWE?A%qzC(;BtFH4A09rUH+2)}i^SiJ<$GHAbliK~76QHM^NM~xNBFEnH{7y9H z)M7ASt!Gn4Gr2G?U*Uw&GBPVOcFX4v67_C%5vfF$d zGXf_*T=bt1zMu`|7jr|^N(M3B111b&?9BIV$gB-dA@i3a8HH+dbt@tKeW0RfN5}@1 zd*_^#w0;Dzrgn7gJSZ+&fP_1N%2DX80cSy8K8K+L z_~jt90F;3SKx-vzTWN1r6!h~2Q<-Iawg6crQGO#*)&8NRi0ef&Bv@982Y z8u5b{@98x$FP;hkt#H_Vv8CmINGIoaFxr&2b1cPIu8EI|rtA@eZ^H+Hl9|;W7gtmE zA8b4fvMn)iI2>=HM~EdU{zu^L_FMb}CPX-M`Vl?5_9!oReDn{$;6c}Ok+6|l0)Vst z;{iNZ1(`%Zkp;PED)FfpcgFn9gK?W3GGOTiAcnw92F~JAxy_b|*l{|eq+#$RJXZKx@6x+j~ zU1r7B0CS5xfzKYNs!NQEDzn5q z9X8{RE~Et25Q;^O1O|~3Lrl&}H=l4~iEe@)vL$`6>~uH=kJ2@G91eiNxi}^AtILq) z)K`XCKInpw>};Tp0V}tviXp%5<#o?!Afq!ZB>r?A2BTs$puXomGxxfbnCX>EV3URs@cT-&Jx4 zpm1(md*CjW{E{`dP`t+w)aunSyo3Ng2pEckct@&GukkZ`%9r!yQ`{-8i6qpuUOML# zyW8X}frF60(9hR*09X1sshBip9sE33{j^?n-f3@dPiTWhDL8(b)7sh!u(AG=Q>XI- z&B&K+K*6~WAxxq-sg_a+w=FKnZoJoSpQVLMf~70+3b2gbu2*H{Z0Oh6jQ+0><60+N z4|sW6H&Zyi@Vi}K&FZ^Dlzh77SnDd2wdhs;B7;KDbRe|lcnqB@sBqbxwUjZ!>RZ(? 
zaDJNA&cZMXCurXVEGu(E(K>9Mgjeve4sC{ZPNeti0$NI9aC})4m@ODKQcX8}Ti+AviNz_abrn7)Gi%JALw5UJR z+2aM+BNL#AXd3U^->?uOYM?YA!S2mJ=S-6<2fB_u`j*JL7V16lRZfk<>R$m_u9WnJ zj>P-e!!R;w5z{L^Re%S97$KnC3o*I?GFk7MMrlG52c=#7m-#}ebYm~sdVjaM4%fNb`hXa7^Kw?4%e`(#i-Qb~6VguntHMtwJHsjDRx3MgC?7 zu5G77^Cs^pNk#kS?ubYdk+QdLfC+wR92|H?OA}ro#gx+ruT5U3&99??#oc+Pm46g3 zArY$X*TjKgEJN ziij?&s(=9z3nKVC8tk1V45BV-O-d#W9UyEgE}ncR5MB=C+ih+0(%aCwG_^=>JNt zHv5O~m(@t9MAE@Sv;HkE{+QnjuIZELzn`K_@u>-YBRHM@m%Xac(HT?Rs}O}QtAB}z z7%NPz?_tGdP8Rvj{t5l#$L5925&n2da!~m@REHIVf~Df|H0Kq<`kE`6~Td-?(mS} zj}Wm@4tI#_UJ`I_bH^7H`;fH)8xrB0n}Wg48(zP2gkx(=^1z|>Y~Be%s`2A0c@xn~ zff4{cG_z^Y?VkNMv*$MAhUP4&_=xcCFLY7zAL5OHG7pa5#uU$8%lEgsk29*L`;=Y% zH|zdkr`-=|PTk%yPF;88>%Jp1&9@ns|NgRdct6Pn!c%KZ6whvt1--6zu#KnwRlvB; z@2^V8hpy&`0Aq?iV(2Ql2nk=ZvSz3sExx$A7cTc%nuvVC%rGNn_`AJk@(Mw1YA*yO zanZsTx9@ye0j#}U0NIiF#m~R74+jtq<^3T;wm+x3io()M*~dRzJ&B2xvd9ut1?cL? z?ItC-F8OzJOAox^<*Q;IFFeMoQar0|!kYW~INd=wn@oZ}xuR(j1XHCr|Fnv_ii}-CT-zjZQ;RLD2P#H#5#l@ zPt3cz`g}g}<-|%YMF7*ckLx3ma>n2I{_%n(+v1AS4uo99Uh61tT>;K#W2mecU%zR5 zLW-#Gofw&8{Y)CT5NNo$iXqa7&)wdVxqU{87K=EV>)%WkiS|#sc__wm~E@{2M{plCvf?*|M`-Sg_SOBMPW zpV5;D%J7b_*dO>gqP``LmVYa+7LfHq#GZ0PPz??n(G`j#>Jh@&Ou~v0{w{(Kz@H-H zQv%Pa^gVZ$j8JmgqB!K8Oqc(NqKgs4PhxJiP^#24e}B`F7-9~<<(179Myrhx?TI)| z&}xq68C;$KAUv=ci;<>iF7(iX|9OC~Uk`+aX{vNcM$TJE)T15vMYuyNvWewAPfYdz z@IAEtV1S+RQYR#w7p4^Z+JjC2dMPw%=zfrAz=fzsAE+AUCPqk$e*pAnkU2<3@z8jx zGbw9!S^#nQj(X}R45m$#pU+N$>TRQ-ng>%{F#`4$V=Gc8HdfN<|Jz$MjPc&R#@6k$ z1*Ne6idn2LHhpYKA{F-KzQ6O%t1)mA5*6)Ti+|RwYY0qAqSMsU3SC}UTEh2qGy;fj zKmzAk!S)0im{QhJH5=ujzfhp_S8=VG&a;bHskCJ=gV5@LKjp)*}UI>5(=<>b%ooTT+kaS9Dvmd zn)PT)kOdTPN_^%dRL3iXOG1QUi+ z4K3;%ab|Qz#BoXw>KEOi{R#)X6TW0VZ29Ji!RImFT90r+;sp2JKAv%1kzS%hpl0`) zp#ZPN6^K};MTcY-gHPL77PzetcH*50pKu+21s52UZ0$9XEp++{HBDSWryAkTH=i++vXUW>eo%icRLE3qI3 zBgo@ZY4fq_(6ZwQJXcIkY1y=o(CYUwJF(VLqS*<}qh{ROtqcn^ZX=yld?med$ukT^ zqVE{vo?S%;`&RLonKH$mH3sG6@c!2;*p5V+kR8!geSLOR&KU^tvHD)X>iexWMu)<9 zRf{J#m0ZqeA6(dsh}QPbYp%}}KxnzbLQI_aFOTOj{6rdqyJHyT9!SmeyCsUT`T8C= 
zQ_@5W#Tg%_guSePG;mMB5HudKx&NqAz&jT|RtC{^>VtDN*s$EEdaOlmAV7EGzGI}B zBN~!FZXx{YwCLk>kS8v-*H^6|D?uU`N37UaZ3kfVW8R;90P=x;e<`Akf<89^@$s8& z?~nWc{vi4d`&(+j6#DZY-mf>A`*F1k>@BMk_1|Zh-cOF}3ak<=>%i5P*0W`+xus zS!M$2C2fR=`H-PEqXU2Z+?>C?AY}>HqRU@%_GP%caeti_x}+AuTJ?RES6~&e(01R~ zl)!bQpp6xqEpE*!tGX=IUf|a(0Z_Lv>@u47PS6Te#wQQ=-cmy4sM#uhqYmVBZGCQi+%3d7I%1<=$Vu=b#t#=V%9f=fV`&sFFI=_eF zc+^7p!lED&Sgno_2+vnw2x(`DNCgZe8=Y{G5o23DAgHs8W}{cJ%jr_f3o;7OS%;u8HUO=q0j1H>JP(T||F0)mdoZ$|~Lw}Ajt z;{c6*!$9hoffP`_4G>cL`i2Sb8RgXGW`Z0=k%eyRphI)u*M4?_-vQVbi&z@rn*m32 zwBLpmLU){Y{}H(8ISvrMzme`$ADI#iQD}edW=CTFcT(&997Q{xr(a2AbbNcS2Izl# z6e$Coi@y(-6%4{ggu?TVt7s3JW9}6ljnOLm;zq7w7NHT}b!<;VU|K`gj6HTXpf2#&e5U zQyuI8GFUQQV=8tY1d8YLzEw2vveU^n3eivs{n$a;^z22m&)aKhN^YvYsAfrH2#t2feuptX-!VNt&b<04*$s`j0h;1Z z*EX&yN^TFv*gE_#@PV833;2|yr@pP}_;k0ugo7HlclCe2Ite`^P&{oEQKJbw@oC=^ z;-k}{flyN0vd_nOU=QI9&GJh=<9F+Bj`RJ#^C^v~M>^lnmcgSWDPCnS>2mD=LeXUF z&xMuOwN#Ud!UGA{+2x{!nD_UfSSarK?Kyp8q5tJs5JLn{RZ)6L+Fi_P`Pf$st-(K@ ze|1BDWZ{>E%dW8)Uv~K8b=+NS zhpARN3Ce`+;cse62l;CJes@4^SiG*8wT9Z`LYI>;NXxq7ur7L!OGj*ll^M=sm~3*R zZ{v}6xmSKjoIs%fcV0%!*BO`P#W;Rnnv7MS`lzwBXhs;e@lg0Pt z>XaHbNr|@1G<3a+4jhng;`=qsKR4!t zglz08T~~_eqf1D!G=;x~g=?U7p}`jAF*dS(FF4c+li?y;mVDKr@^7}k32_>=2PvAt z;`(&w26Z-|EnmsjP=*jwH;70phqrRogsW}0OmEeVDBpH(qfUk|y*Ec4a*dszZ7417 zTM&%gEqnXSz^+YUi^+bC`Ia3gv^$*g;wr4WyL*TI8g!QGzi4bLyvxzs0hQCUr{(U) z2ybjUBEYHFF0F)qSzktqVfgRU3iPDaF0A$=fN@oE4S(JT58PprIPa`@XLZs-{d~0s zN1v<1jXvCmpu}V>WRxr*#S@Vd{37?U-67Dv{iMd0S1S~P0hZDRq1K?(p>302RTJS8 zj1D(Wk7LAw5?N|}G$0@KA zPGdE*$EQ=|Ax-R;rO($lXf;ei45pU;=2DidEa~-WXALIZ{TV*#Z*;1ABjDfSWx39F{UvsY|W?BfH52}Su&na zxQb_;|Iz6Q*|d+Gc96!&M=?huM21Ibq#q5>d*fcy%bCts>gm>7#_8MS0C9VV0vlPS zk;?$6RH=$iP^EWadQDdAkGmd>s_hcx%y59@#Vm&VGpDFwWV|V2f^(6f{jr4PS*2%I zcj|`PY)l?;ul=ayxx}snRes8ED&jNEhYCHXJf08#Y;B;1L%`d=q#k`Ysv8M#+WVO`}7< zlHriMryyO)Zb|L2qbOw>tiky~ddN39lghMdWJRW%o&!DP?FCZ%G)cmkp{Blvf(+BY zvnmUh&!&~X7GJrA3k>DER6!i3Xpi&-D627FnZl1w76h6B8WcsWhN#t>y=_Yw6l(d| zq6s{bZ%90d&@U(LuZf%qU-#1V-aJxQW=Z0^4Y~Kn^Z$Uj@0+q`PLq!?HTMHVs?GIE 
zR^!FL8m5o4jDjls<-ryq%jRo)<7pC=*l6zMktbsqxjL96QIC78<+S$=EYT-*t5tgb zmkt`AmM@RhjGsLmqMHhKtXN~0u`=W5Im+`KsN}y^?#2oKHQp8=SL5F2`8r*xFHC2*M)yDrj#yIX+%S^1rokw^l_$1EaS=n&1 z6*5|t)VV`-vXIuZKwM?JE1TKYWUBJlaSStGene_-DnEC$DzdU!=)RF9Z!(Q|&M0H} zPMcxUC|_B;Iw2a&lBLyEK6omK2rKAZ5jHmbYuC0?NlFH;(D-4+Ib& zxL0FpafTNZZHM7k1T}FT&S~s+f+pf(kb(-CJPgHhJqTGk{+4Sy!dW`o5VEAMV{2*Za>wYP zRwU~)YdeG5$WY$A7sxucT;N+Ez^vX*7CmoWSPnPGAFP;{o*H&nD>YYc{N<64?Y_v0 zk%*~TiAW;6qgzdGKcX#bEZ16ITcJk9nQWKme6=*K<3MO;)JQz6SrJ6uXq&5_{EykF zF0yZ{Z!o67^kd)pJLja|CZC!{Qe=Do{k7=$Hyz+Dl+1}-vc-^;ldHQ_=fTPHz@XR* zQQ%n+B2mM%1>muSzU2tH&q`b_`KAy?NwKkTF8DPHsC+_ASup547 zl%>|8sZPkLhlLY{+1I5&_p?$LsnbVruw)G)GZn^xAQ zY`{vRL0)AzJNk1c8K$W{y3n6~pz@8>Fi?$KjFS#!hsC%sWxXSeoYm(9jY zWTZ+xRMzS^1eEnO#w96plm`df}jbHs0ihm(XqT)n8(LaOl3*KOyW1fZZOmt9t$qHSgT6MwEG z`+~1xe)lrtk~1Ro^i;Sr8s~fJIbEGv8TeBdU}76K za6allS>_!B@;+V5uM@&xIp^_m<|5vo-Z66(qb?qx-}h)ig7XQhwC^oXr6&0@Vc4*E zCVAZy2b=DAnpT^a|NOC4uq`50^5DgBm`k26><964Mf(WW?CY}JjAqSzCvcV@Y9(_gbD1)WY@=p>XpoMV_BABT@8hC>)aTCc z|GN@564~ML=e=oIsj)57m{<==mky_%>veeTb&tE&(KkDsb-O%3bC^1s_nENzfUH45 zpy(RBaa$9KZxRP>Ih8EBM1L9z7)j$3T}W4&`pl7IJMp{+mjlgNlZxUC>vZYf^+F&; zmZB9@{B-=#c22ik2R^MCLz1da_qD79)M&Q1J8dlz!j6GFM9_wf9TBg%O1cH4+(t>( z2n))_3Pzb5DTP{+j`a2gIMSnU>E{%0BWx-AORbeSOc?B45-!f!)yIygSAQTFF;!*= z5QgonK6Awm)Inl}#~zH>(~S=Y4g*XpQeH}Lr5r=)nUBzrszd2$FEN5SP3AJx7D5*p z(XZcLV&kY&90VqgoLyc;yy~ry{aD!=^8^aRre);Y=@NGQhwt;r2knB`?!-Kx)%Y3| z`&25Ga`=dZbSYU6iUO5%`1uyb)im5Rzc&xS-eDJLjpopvU6XKTB5mq1L;als);3j2 zJfd61^6WELRW&rlLvPQdrS!*)6?dL;9nC6nZ)xu^Hb%!flQvD1L_+d=?ZZnMnun6b zXu|UW5=ElQ(IXU6yTEJ(iY99KR7%yzplMVwIUA_k=%Hz5N3u1fR8FoiGWVH^+S zuI#*?b$`>4!1|x9%45B8g-qsGQvK`RgUbteVI^+gA4+nK?o+^)<5}&M5qWmJhw!Tj zYtZv$R|hA{k{pboW~!d4%s$jk?)pMs73vO5!!U z6{-=I&)wjUi~G(nz)0Ha2|}Lg@OBG386_L>Ekv=@bgwb#rf&B*9@hMNAu;(ct%sX| zK^B(m5cSkuadsomdICH#zcuhq zQQ{SwcZ)bEK2#A8i=9I5Ux|<;-KD>7kVGb6^>m7EDkw*#c6Xj`%vV*M3HhME5}X*w zyCOK>4F7oB=Z=OXxfobMg8OD;)X`9^3CHS&a}Y9CU+bYl!=5oI*Lxmx#rs9&0^+p# z)!sxveTR<QzmYSTOEcw#0Onk^n=*ydydkqq0~pr_y7CoP-~-tYo9hn 
z=6Lb2v9JUYYBQshSeKw8!3jXfnSgE9K_ynOMH75rTpN}fJS$lDc&TC@8;^hDbf0=j zW2q*QXq5KD=G=VD21mzr-!&4XB-n?iX6cau$Byi0%{0;8H*vh(HRP^SR9B+Wea-tyyZ4sbf~Ny)cImVq>CZvQ(;=-Md-C~n0*{bUm9gpc z2H@ikL}vqG@&^Icq0QrZXKSbu>X0VM>535bGAyoley$&4^H2kUgC(YS*?j78%-qFYd)x6`h~1TVVld0Z~!Bg zKfpzJn@fk7)1s)D$Q^_nYqZ?~KHsY4(#BF@_|%i9puX*%Pt{luitM_u{Aq!%B%8{0 zaf#MyP3*Tr<|_}CCTHg+4b!Xy;#rhIVb`s0*PQ{;R@XP45;kB+g@?0CXj}}hJqFRw z^XLMV{_-@@3Yu~Mwqb-Vx6IhK+x&_XHf9MXC`e!euX5O%TBE3j!gsu_FUDC1@jz~p zd?~fuSpUPw?z~RkG1{#zoY3q<1UP+22!8$3N2@9o?@DyoSG_>b(M6t^(CIteCNJ1Y z7RDof_)SSfs;C>pLp1s;rz-p=%Dv7(T~Lv`xpqC>;T>lXPfgx^>im@9gk?-JAJMEK zmm4yLEVtGWg@>Q%VzH{6d(HC;j4SY53Kk;dX^9&wenUG67JS?$FE5e4=8*YR zV}bHvvumWIXUa^WjVkN`)O@Dk z|6W45;P}8#%sVZ#p-}hj;HsKJ79~y8vO3+gRg1nJ)9}^DY0rP1x+`(m@`ke(`@B+f zgQ_%=YIB$1eWp#7!eE>Dj}*IUcFzi3RCM6Nt+5Iwud6K{>bKC3>v@rJ=yGq|$e{9N z7Us zH;tkcX%bVgS?-)%eW&$euxH@De!tCcqJ>t`ck@nb5N&BXS|k=)?QBxre!u`t8(E+c z><`g&sWiRCsM&35zMcTXq{WF)G}P@kY8wG>L{i)XUdQC909+@klk8~sibZrC8bI9^ z67#~mo;=A@j4!o+KG@sV#Fp7{$10B=^LFTmqa4;zeTuOYCVBIuW^@Gq8V{D4qk_`SAtHy2U}5*|K&;P!wna!5S_7&)Rf+uXGDNW;DI{bdrWxx_nRAEG|iw~a_(0=cpW zk9Bekd45&Mk%4JBV(t93cCPLFG9+EH%;j=gM#?7Um_|v(99#7&+Kk#cjA-mY3th8w zw|yqJUxv6hylI%LBDmE*97ZMj-#d&C;-|pvtxjV8k{?ejl{8klvmd&r_zcz}Sd1G@ z{P764C-BXroUqLP_L5V`%3aLwwL^+>uUge}d-Ipu(u~@7=Pko%-43pV`@v8B-p`#1 zqK0gI*}`s&atYLNKot+$-kZWVLJYNUgg6XaYWrr^hX<8f1}FMYISg@kp;V-P1?SGJPxY5i84(WwoYZ&%fE>%w{e^)x~r zdW(ultSRexbuf1&!R=`e_*2~W3*2KY56=&$c56mm{@TCCuOh<356D}HtHs+NyICGr zrt~vvP=h4txFnr`z}wsn-ugtgh`ZF&{gryTW?A>LTy~E5lKx9OOWd8X)tb3}tHy`b zE=WzGcyq|lv&f=-+8&)lKe9$)v0T+VAts^%A`SR%Q!}ID9h+X=!7DS z!gJbL@R&@>v6z+5k`s&spRdO?pKhveaSS!vtaXRwvXPt(${V=Yu9Igiw3ELQ{_3a)OwUS7FADcuItVY}$8BspKa zb6oETZZXh#O+XfU#!YkkOEP%knyWv;k=NONmDKCRB$E4PwPmXnH^|Df=Ve;}#UDat zg|S=-V*G~(D=;A5${jVES&`7?Z$%cEIK~xB zU%uD#tM#&HQ#v)lc6&89G|e*xvHeVObZd$yu_mA~&rd+(m>u0uA*UD{d?R0x=-W@R z@5xEeBeg^2{~>7jT>7H+;3ZVF=L>c1Tn=40e6DY`Tz+Qq)L~zD|l49}{XY#G* 
z+Bo&GbHI>7iP)xot6YlHnr$=)Kl8uSKVldr|mV>3kx?0x`haA=$FWUa2W?^L^yYO4Adul{Uezx`t?GmBS25f4)6cBCQ|1|jMJu~quT-Q7iy`d3gf7VFbF&Y zXd*m>fF+hnng<<~2s&4@Hvz9mZmA**qdJt!)RjaN6^Mt9#}U-p&B(Mk zq{X6QGUdWbH?3#E2)193!dM;J3qFrjmrgbi3=7yN;9u$IDO43R>$kuDBxoIup{!qE zU!cuw+|Gib?Sv9Be8q2AJ`2klL}ichrg{?&_`m+p6USDOTNu#x# z9A&5>*P~ifvAQM%Nz0DH-DRu|@YWQ<~ zePAk+9Xf~Ex_hw;Vy78lR_>I>wg3p(1CUmY)QaHfxg{lTqwZUr@ri3otJSUEY?Hp` zd|$W2opRdKEpKM;Ppb!~(JJJ+>z`zf2^dHK7R67y{G({PyhEJ2zQxz9f7uwT*X-~m z<+}3g@J))>;iBHUo00dD65Vegj4Ro{Ip4V>=Jh0xu`jnorA;@EVA0MOH zzE>EkI{=ks=4RCOXT2En`WefldIC55_9pFgrku2k$OR-=^Dv^TK9rI2e%FO{`m^JR zVfFBxDJo1;NYd~3RnZ%IaWc?5YL0v|jBi{sLyNdcuwXoQIDWO;!#6zhf<9;7;HFup zAfC-KqI~i>i27E!l+N|s{bF6su<>D6?1$V>?*T3)%_i*8`8c|v1nHKQu7NVuZpInqA(ksKqJpm5C#0Xt13PI1utjtFl(FTF`kODT$0&*IskP$)TF}Ve8wwepkb4 z9opweRuk&_>h#7%ON#OscgL)RtF%Uwrg~#&tLr-cx40&_K+TWZSi5=4%5+Y zd`|5|b|}%x$j#e4NS8LHJQ1{PT7H-YZ79Hrj*mf%_6wlZ_ulw22#I^1WChQJyuxKI z&C}xC+&@~Z=kYkw)q>(1zZ35}57Kk^}D&kicCLPHP=y zxydVegdQy9>EUD0;y)Jm;Oh3yrv)M!^Sj^Miy=67B% zD;RY2uNkvD<~9)&Bt|2Xt0j5LKx3bq`JpN110a-M7WyV;>vc@}x!%04HZcqb_~2nf zYl8UP7R$n8${pK@xV}y+8j+93o|2{aX|2(!Xsc4d11LT!%(;tcz6x7j=2A&lYNNmjFUr zF$tbky)IE9nZ#z$==23{r?=W;dokGc4`68D>jje zP{lrU_Z(Zu(;i!e(l9u|&}y?_jMpX9c3YX(*=vUV$b=X43Z^u4ElXePGXB1WgF1vt zQ64ZvkPxnW3DC#$L4%K_U&VHQ(19&b2jEb|YkH*MIoI zdN!ra0#oH;d!#*4~n1ImV()tWF2hqc2r7KhRBM+AZ( z$RRx~9Sv2w$fyYYU%4`CbfI9RFe5_W zFeG)isFU(y{N$_JF{zj+A*bl7a}n0Ash3?DKGf9Mh|N1g(zKWJ+iCl zGJPt&)?gDYyFZr+bw?@_iR<>Cs1PF&X=f85g@FWl3PROHjHR02SA zNl7R=f+^a4e!crwEK1kSlxa>2}zx_0uQ|{3DOSJuBSC|HMHylYZnE9tVJ^DD>ip=+rkYy zU3DgFbO@)t#%-0F;RTkWD2OJ#9s1|$TW2%UfrD$d9>Iqe){?5pw2Qx9Sura#$PLZI zey%?jswdNW0Kv!Q2&1jlPKHo2Wi>5nUQnwwDplCx?gy+VQFS8>owif=rE_`w5zo)p zGsZ=-YUTWLD)sgWi8OU!2ADp+oQJ)zSpOW9EG-90)r>dBW?tRi&uMgceGg7e`0+)O zsD@^>zk=!Jd|Mr#x@5h-{{f`XJ(1Ok(sy$%Mvv`eECD?Pg8P>?Xw%y(1rHxxAL)Kc z`iH`}er?7LC<@>FyCV=w22Y?T&YUVrepG1l!304zZAG)c0jbua^~+fwg-&SJ5X4qI zn|T-A3;u?SUSJFVM8-s}oHF427P0H+orF(D_3*^QjpL@{5r5+kmB`L`Q68PbV=b-h 
zp_ynAPO0PKbdy_^OQN@{Q8N0U#7=-dAMxXAeQW~uV#VwX(hB(CAKa~yo6`*~ISllO zQZeODk}4Q9^wk^nd#^;&h*&=1R4IyMp~kLa@dk*tz_2Nr8-ctruC~G7NV}ka($96} zi8Us4eEf;CfuE6df-NP(N_2u(U6O@kxnB2>zpungDc{2D)AY2lI?65Mj&Gq3q%eDK z8HU)K4jb8I^YU1q+Mo(w8;W z2uKM`4d(jHfSD$mms>T;O2;cPk|R&V)JF?tIzz7HfgSSd%nPep8rG;9l#8rNp2$Oi z;V*rb#)?8?4`dW>$`^zsjBB$kPGP`wv5M7P%3NPVo?g#bQcm01-oY*qB5P5(A*5}_ zvrh5x->=9nqPWRJLAGPv1%!y4fYg8v5(EwH7n7r~Q*EMW#u>$kMdsTrt*OE7=qzKu4o}Fb30>z3glL&% zmGNlAM!V?PsdKO;b&8iF=`Fv%rqx~96`?~-A(RgjpWS5noz{H;_UL6A`?5^ z-I86xtxpdoh+r`9*kJ&_WD_jog4fRi3C^AbisY?bp|IRwQz3k=)>F?N(+M1TO)*>^o?JPW2~D>SeMFmi_0?)%ve=yOzgG3&UD?} zZ1+F8<5f;!qz67E5LOEc3+p-bf)a1_ttwJpyu+fe;sdYMd#4+0H)?q6PxYEXHZ=59Z#@B7eGhjNcQyg%LfYH?$}gQGK# zce^-Uu0Bz&79lA@2Ccb*N$&pIR$Lj8{m-Tt&e0(%XTB_YT( zw!fPYfC$&$0OQ+>%l6X%83iSbozp9s?!P&veLK7@&UZN4R2T9$xDo@~4!avPNs%N@ zUFr;V#!TmMRY~|#_AAoX=fb7W1f&UTXQ)z6Mb?@BIb#1Ks{Trr=$b+Xasr-l*8LtR zRdX2N9-sME{3QCv3RYTWA1rV2KTFSLYL=`I6{)lDmjh+ne zg*sEhd6m-;j{5tp-*xA=_N!P~i;XBIM<(I)hm7W@)0;wV9`7X9APBGKReI8?XlRE{ z`A+0aWG~}rLnFRzecG_OH57`(y%`0F6&mML#V)tz%Q9Xx2GMR2l0Ey#G3WaygF-Df zXdcy+QQ$)+_+wt9P#c(9plhAvu-LtKXMeRe;14>bktFtmj%v_h-)mj*Dp8;>8UZ#) zU+XsKRhc~}9q&F=UPi@ZWlc3o{+Z@NUJ&}7g^G_@#WuqI;=z3R`-g>g+t^F~*IhI< z{i}AniR6bNKMjmq0(SAMA87vTXs&fT)>WdK$MReL+1rDY%w4BSuS7Nn!i;e5Yi&a# zeF4b8G}OBzK^oX>W>>#e(nJc$(A-($sK-Fj$AB`T65&s3OD7N!bW3rb>gdiU7cKzP zBZp=RR0O5;SN7cE>Eon_Gb&f)>pAGFFXw)2G_*FDKod70mYX0(ooa=MiwNX0G*-pJ zBaJz-Cz~5F?;E~q^Si7IjMA`!`I?d;_ERNVA7b}3@#zV%G;9oC!`o7!rQ=0BZHhLr z2cHeWO0e%~3-w5hAF=FA&^x!6PTtAgguhb(la+WQUfLUzit7`~PiQyHaMB&J#0p$d zPIZKHEY0Sjx~pH>>mwLZh~!sPLageg_NGe_mge21+VAhC^71hC=j!AOJ^>OegQM-! 
zLl9Mk`BpBmu0RDYkj{$h%q-cOyj~QllQw@)qV&iNLN+KFKL5DTXhykG%5E%-MUjHt z_syFF6bs{^sx<34$I~JN4md)CRNVe~qW)TkL3DcqhX2LXSq4SffNNN~JEai`=@6tF zR#}=|8kP`=rIGHE1{G-}1*E&XJ0zFxknTp#`<Qy3Od|p`uIa*@2w+d>NRD6{!g!5rA4~mlTFm@^?T7uqhi@U zxo&L6)mYd%r5vs(Kgw1~^Qhl~Dw{^tvlN+xq1jChbcY*=>xK*x;5?VB^fwNCsyU)(XtyRycG+PoZyUmgFK4tQ^ zdLb^mHhZ9^MN~G!OqT+f(LaKpO}D8;F}rH}Z8}#4;0}R*aAJj-^-i;h)`R`FKD)`| z%bI}s+-wY)I=-(;(gU z8tNsES4-M}l5)_bww6_1V46~$fAPmIYa!VYf#95KO4*Hx)Mn&GD({s{6^UzYL;UJU ze&V029FIsim1tCiHm7Roc0DW!c_-rA9OTYYpSwff&=kGu~{IW1A_=P9) z?kI0t(t6Ci(IJq+jg9wO%f$8?g-5AD>*dIUbOq@Qp#ZoC#gy-Zvuz@ z(*$ypmOa4EqV4({&)*&ox8NRm$5ozI!t?Rj?DdUJLhtvc*BM8M({kj#MUfL1zDu<%Y)u- zd7lSH&fX}#)8_l zwZ&2H0R!(&YBYbcl8Jy?SxP^U)O92`hVk6NtKV;y zxk_CZbA(h?BfCctl!k3iC`n~TAV-8NY6w3?B^o7oRQ6GDZ-Jv&MTqRIi`|6t~g%X28NJ;v!8#EOoIL~xK$3jAgfl6kKjWla#;2O*=T zwm~*8ZbBH%^lFFlLE}Su|M~HMNn&mB6<o0tY9sKv!>e=>ZymfcoNSy70B~rUM&m3j=LtSevjHG4E>uB-9 z0myaP{wcI$ui|mm$*z`|?|E^$)O1;}M~sSl!aS6(EyhsK$LR}(=#^oA{YC#3kd;M5 zenRGe+nef3m+Mx_x+H75e)z34F_Q;Qde-3n)I}s)xoRK`Lf*h4VcDbkZI;jeecz^7 z0hk^SlT`t37Y4wL))ej^XW@##O2I;&$AJEX`Q2IMo23*TCnr;mY8~>dQfn*R94|M| za3|h$7VWUen@ifTt7G;5F3X4B!t|xG1=d$jxKO{>)YFN6{=E=H!a$_$TnY)%KdLA_ z_=s;)w=3G97_@Dhw&riR{t$X|wbJVCf7?d<9(d7*_Er$4K_P+**484a-gzSgJJ75C zRG9XHtN=9%waLdZ;QHuU`Ra`ne5Bp);o8pk{lS!))J)67BQBw{ODsfh%3^H}ZmP}& zj(RvepTjv3phsS}OfC1F^|q=-KAFfy0Vrtst0_JNpZ;sj|I=OHJsNgs>o>h|4DB9~ z6L;JOR$bzla&ze&Su5hmV|f2j0$J6z zx8J@RSZ97dG!jFzj{*&NbUSvrN{-R!2mb_cUo+Egx&m!@Zeu55T4JkV{h+kd!7*mU zTYthGJb3hc@V?n#h?}lZ9*QL8A}&XMP5=3uBi=lJX!Kt_qNy;!o|Z~3K65Ky<(KL> zCvor?*9&j+@qH)ZCTsRFy4x2z5^jt;lSK09^ol)s1Vlb7c$vpW0Y`8a)1isPy_qUp zulmyP^;T;0$y3#2u`F8EInFRkPvR&TY1XcdzF3>ZrEEr*XVEOri+w9WaSq-LcK>3iV*I|Fl_uA_)VTqy z9m(UVqKdB7jM1MH#i1nkHEtTi2Y$pT3uH1Ui8cOi>QVc2qbWEDnO)v2E@)3LUrB4! 
zG#YQ6_nYWrLmDPg^6uoaJia>kn~3j}tEDyadeO%aY!m|}`jB3Jwa{J_qlp3q)4%Nu zqJl#_2W$alqd3E<*jcVd7&RppVfa_usrd>MxgTUcJEYUMrVgj;Cwv85xS+a)ne!aa zD<3O;W*#b?DupDLUfwc)6d&GN3?Z(U5v|k(zrWv}#_9>T__E;JrbPmxY%Od0A-URP zS$9;TC804Qi3FQqRz2aYiZMk7XP=9BQ5!dm`hr|bqs``rA(+FaC=vvOVnYb3#o5?S zQO~&XWC`?E5H4tVC0M;3ECr>|v0ui6>bof>?9i&$dV(N!3v}BDf^vhYR~l8?az9E} zaV@Cpq_IS_Yurdk%jPM$XQ(>b%(xJV#puoaP*O^UQcI4~B!ldVDO zu+uppI?}dyyOC9}Li9*Tanz)1-m@s4dcK?Qk1{=i-lA#Vq4kYkjV42^ALY8Z%)_KI z>))Bbw(EW%WrLzC3?I37;s=NVysAV{$hU9}}%N8|V$6T(~!87p|4FM2;=Vk*ugnc*s@7#S)B_Jbj7 zv-@E|BnrPUKPLFH2VmJ-m(z3{jH~;ChpmDPU5sT4gS7QQX$)_#^JQ9`9ktunGRuEd6`++kI>Yk#hFc3kV~azUZ2nGtm=FUF3%a zWZ?B+;AYy~PO)_Q{)V07vkxt|Uz_&K6uP(zt{z2p<#~VueQSo`E-? zUrc?b7j**2aC7Qjn2(y<9)HtU8m1u=a#=xLF`SCH;tz{KxC=*h=?0#erc2a<%a)6L z(Y%jkf%vM*A4}7#zLVhY{1Z~&GjV7o8M%O8SDchjMEQz%S8>4`kE>+tg^%K@;Ewo9 zZ;w}3)nTZ}H)p3Z$dmC7@YuDQA7UO~--mls{3ZDQwGXIqy?ER}n;D#3z3BJ2+hT0< zxl_LP4}4dixei9&VFX>HrzUoploYW&izDVV!6A!8 z4NUzTx#bKZ$LMrRG4fwZ4kAi;m;~|1>6r&xPSPH}W&|{cbn(x+AaBl9)md6&Q)mII zRD3xw$pR0Do7=4fnR+g%Psi8ph<{t}5? 
zT>z7i7qBS63DKYH-$yHccTFodmwl~y{J|S2>Q@H#=>b*4JtuejbsLY6F0(J}(OA*e zAM65w^k3*MQZCv?m{&KB=rsvB2`S)ZU62)fA?7+vyI^R2P4vlyEN)4@?ou3Y6E~e( z-LsJTZ8uq;_}wf0O&EfLf(h47Fkti!XniYwZP<}m@h`PGYzR1bwS@tfkFm1;5D5AD zLV8nkg^Rx36unj#k|Yia`EVn+tv%m&=aN z=>6ulpmOKUb9Toq51H(npnd=v7FzV@&z~ZP5StI9flIZFlwibvAr#2qp+5(rFIKBJY7ScBP= zXV^+3uwW?L4=lydiYrON;|WDXYFw&GiYLgLOclxk1dC;;huvWWIt|JJPoNh}rNLMS z{_vl;|IAbd23#lc(*k0PpztbN(JUkM4yM#gfo< z9)nNkiSY0NO_g>z+y&#^+FI&gM7=G)D!o&0e{dg^^jRa`7GTtk9L6r6 z5cKSpxvaT6Q*irK;d#%hc4C+t6j8ZV^(>HVH>%!r21 zuXmL9A{m$A*`VUdz12q1K3uCnEC5cXeUf;aY_PKF~Y8-)0)WPi0+xPh+ zFhfifeR8PD5_D&&GBCF&S8TWuKjtFWmDU>&T~XIpKS|kczHUmSk@W=Bi9-Awqahl%(HnWEPNHqJZ2A@=aq&&8qTv z5=M!dG4{d~Dh@*Y4u5COtHXEklA!yOwcPs__>u16JTF}{X6hW3`eB`0^q@IsrRxk^ zZNbW`ML3}3OFTDuO%C7J{FOq*YrX=F;QZTJv zAryU7z0O$c2YiCY;l7C6n>9Pnn85O}50#)nMiJN*T*h=l#CNZ4 z!O#%%#v<%697q{jl9Hebq$~m%YLo3TdOzvUgw~f=G^cpRE(mnKpbbB`!ycKzS&-20 zzjX_Rl@i%QUNJ{zOFQB4a_i733`5ne46`8C!U=4Jk!EX+(K3+06Su?hX{&OyxH0@f>8F?jHxB2&c;rnQ5f=uB74eGd>cCR@ zjFv{O3f)&Q;}%PJ*b=@Qo4%&;1zk)9$k=j01;ZE-^kQdOq8iK3@?Fx`avD~3%nqw4v_^n63R5pH3WoqYAo$~`dI9)2*m=1#h9&=6#~u z0KJLuxO#kJ-0JfM2pKm>+oi=)G^PfclkG>g4F}^SA;=faJDP&) z3HNQvaS)6EVzB=(saAB^*;c5j+Te*-SkLtJ8%>lY!N%E+HI=c1Ux3cbL5~Ygqs=Q9 zk?Y66E}Lt>%=5kDNBOI{8iOOZaYPN22RR)E#dErYzkYtFIHoZ4$W7f_+Dn{@y?zjq zqp6iZ?JA+m$R}g-QI{ckI9d9K(!zxF!n<~~-!>{-PND3JCbA#dow+)6>#!WH`Dp!S zbk_Nhj`8flzRW$ejOSpShdkfUTY@hiQs3Dd-^kf|ys%$6)nZd7RJJP7o_3aGA0Z*q zDSZ9unXZBE)!cA0gnY_RRCdR1y0>PY<=6EenvqohlnVAil{OJfZ#bRb^*rWBDVbe58H97y#I6v7T^%ZenU;MfAyi>0|EHt@4-zU(^lQF5c&Z*~AQqJ78184#Z}*`bajZDEH6z0(hxdWtxT>9K{lUK9JeMUHki$`8+H0w;6rhqoigIh)+xkNe!1v3 zDo*_Mn`%KAaA*B_F5ry*&8pvn9gCqwhAu#ZfMJK%V!ACzTDL?U8I*Fpy^8c;6x0jy^T;CU;ymfF|oDX|T z!3XX8NkP*aO-n)OC4jd2?{tyCorsb(U#h>QmCjj3hsfNZFI+=+4Yg^D@3y!JHA6Q+BlPeqrttO z;|a?#bMM9io_|B8WVk@syN!*0>a)8xv|F=r+4R+R+_p@6*^bbs+sAH|TdQ47L*XXC zL^6&HKIq$wx2rxon-xLSexx+Cyy@y>W&%21)(wtUWJ^-6-PxtpmCpK>BQ&}s=FLAk z8n~JHa$$HQ)DQv}^w}6{UGrKJ%K4@0T}iH2pMoe3)BD 
zi)5)&exE_;S+4krnZJT}w6z{(uqZJlg+e`xstx0EdFx?4M`#(KsK73km-brkucNXI zE*|$Lr?HL-`R$=3rJ(;F)NAMkQD_(EWW&%^t)LS1^4y={Tj49tD)ov&dn>Q$z3OY^ zG83=&=A$3n_Wq0@EvGHyf`R_Jy%k68&k>oidPmu3AvC@kIil!2UDED6y4h^PzRw(? zgRLCqn&U%&iG|$f$`!?Dq*UDG@$un1u{ z0+Q)jHMEr?HLYgZ|C02cH4zu901d(OmQhax~}-=Sr8Z1o44ROz-0jtI25ypOu&+ zCDqk0GFKjm^&%~qfvaL3Iju&awHU-|iAvDGO*;76NTXW88)7?+y-igk{kKU)qsX2l zW}}um`-Q2~9aRN~0ogUNTOJ1!ECPE#rv(_Hixow0CmW>^qPt;$kOp zmDf#=Igez&=Zna3-g$dC&qVm0 z3;D2jy3n-eYv-M@KzEmx+=^uF>Bf(%eTmNmmIXpHA-yL%BZ2PDKv^F0a`Df?0=X*M z;hzSLmz)`^1iMlO9GUf{r((_cewMSH&m)S3h55_2)4j)HYbTW?|4PW{nqLLfDtIb; zTj|SDo3sGe;Ik7=R#sk!6nJ`hE@4Nj+3}g&Zi#wW_IY`v>k_ERnp^ktibPmzwHlW# zzgWF+e)4S0xE>{jy!7^~#G=bAjX%j2oLu#KIUJx9+UuM#v9Lvu$iU0e(^et3qv8R_ z>T+PuE;Z5H=qliF+TVLssPl$*$7lLgS~f5dnOv z&J!f=_QsK5L4y9na@wZ1=jgM7NHmw!))s*%dlazA7JIyX(z(4pNHW160s)<;^C;q4k~q!HZ%$Gg7Z|LoF^T#cs&DCDuP7?&V^3mSj1e6`8A(&FvKBrvW%NZ7qqNvrbD#9hWUy@@p_`PEqD)J z@8q|-<3!ZwLZHgSe-Q)24Yaghnk(1ehsUT}@Ldq4g@1R^seZ9ZS^HJ>Tidawlg|Ow zByrB!TQ0it%%y&va?JXvvpiH6!FI3o=yyDm7PNn>W?uv949a_VY5IMH8QI-Twl>qd zzWdv2Urh~VUMjA3%86HZ+C3H`ZVG+vzb1ZYk7zdx(X@x;W4rN8y-Mv6)(za-W>7=x z!v>&QWJReeg(=< zfDIvZ7m>>)EfUli%WNg*%eD2Yi@R|c0A?!hfOXRaSp_bLixcW(Z3R}9;nrn(ecL{y z>_AuSddiiA&|t39yP(LtcA4ce-+5j~#wonLIl8lDxHCMaE!|p5h1p5zJ5rC2-L#iJ zv|AkHeP)0i7|&TDe8$ZryP07>{2(91D-!xNc$Fe!mCotQt7#^1K@My5>qd?$=Xjl8 zmE0lBFx_G1KmV@(b%X!kDr?YJ8lRUeb(P^z@N9^~hwGYacGDkG9@h>WehkyC7@vMSdVg2)@fY>P z?PFzNmv$b9%Z)QE&Vn-F-cT|P7p*uh+Si2z)1uT7L5wZ1imZkbU3e(Cs1K*KDzPIA zBBnH0uet<(W#4@H0*+KPIU8V{B6n|u#~wiT_P~&GwoN!AC0MV2Q3%KyWp!U}@|CYw zjsRE(uEp63I`ekaI`rdIKhLG#fq1uqL#6^4)v8nNWRwR8=Ot;IHhdq6tp-#m z_IKDCO2A3tfdR~{8w!rw?5bp!5u1cohgHd+?K83qMVHLCj3%sW)kWXt3CMD5FzVUs z3KxFSx@(KsmR+3GqCA6Tevdqk`jZYhl>sCdjrU`q>y-_#yYnQ6*8yU zJH9mVX!uXM6TJCt=M&T*RuNk02@%}&e$(<+h`Q03^XQav_n-pt%LZ1s(`Jj^%EDN(4#AZ8p$2q9^&NGE83xpu&WMIOymZB{zu8x*mx> zA7lDR%?H6PZ`Srxel|Szy4A*BKidj)iO0QSmo9UWiVo27CiK{Up-Q706<6;c04^Y4 zywj#UUW$+&AX7`1sHV@PH4f9lm|I)=0~M~b!;Wnp>$DVG>9=f_>Ik$GsBju2q|tqc 
zX57F0Ynh*BUKl@2D7yQYjH$x>kIA8dXct231QuHzroFjo6jkYFOg}`bmw*mRoMwE32Cr(_GB4 z6fn57H{~%6J61W*0W*E!Y8nzjuTR-*Fb{6Qj(Hf+4PGQ6rRcAS?d5GTCsO{k&;;XB zKGe`VHua1?G7C8S9h=A7V&sHtqk-w0M%K}xtw07q9KON&++}(6bktIG>hoir9ez${ zG}#$h))bnDjm4x-7|hPAEf5^5Q*}2(x+YY&`6>X6`B%z?QmTABhZr&!bd$v)Cq%u4 zD855xXt&rWpo;~7+AN6ON~wd54nXFsEGW_Lr2-X*n7c3?pfV9L3dS%e2_~NghnaOx znragCf9Varyj>ZMKRe4n11fjiZuZFmrNHvgnJlhPs3=lLse~#*W%A~G-X@>dg2!hX zequateEAsFR*hOCcZmHIS%QJPOhO_co}8%BdIa^|q+`gBG|FVaKgt$q^7e(~A{Jr< zm6S+JHR$g+W<%^!hAQ6ZVYvZKtU3tu7MF%hcfP!W#EyDh5kN!)sk}G*ZDU7Trmd5! ze$@ob6OkgP%Xk)D)gd|Zf`uG9pw39>*LGQph94Yp8{5NKvZ0!6T1A*jKiHyQ^+t6z zhRZv#M*w6kD=Y*BJ2rJvP=)K87h)6?cxjKS1G`%<3@ky2ZFhM|kl0pjy^JIV^;Ye0 zu=t(xt4tO97;$osM>=Tl$v-TAubcK$<1zPdpxF6^?cm_xQoy-_kn9dcNNtY*Xs6Zc zS_ORXzjK55dBY7R@1N0ytGvG;YZXO3jhDqE0$v1}Au9)|-5i}>Al}sWD9i%|k!}^! zDxSbIPn}WLL#Nc)(U2l%F0R7yp{o?aX#|9W_g_%a+7)(mO4!^*NY5^O-O23d0vOO) zgIlPd9Mc$d`@F>RL!t+-0pS$*P*9u~CwQ7viL#ZxqnqgYrb42C%HsLQT@miG2ik1WJFSV1B`h+7|9NH~y&j!c?TR zJcVPPcqDD4@@ZQf+Ao9JSH!#1LK!mJq7$-xu-!J$q-xvOY3|MEJoAmS!$V4ygLfs^ zy7i7UaGD-IQE6ZLKM#3hs+yB_Jb}@;!es&W~v{b++S)-sW{hy6SVm z4B)b<3)?kK(i6QX2G2BrhBuqv>$5FC-rO3A>E^B)OloOP0zd;7RFA=GLQ#-yIvNU-Pe@zH6bf`$N*Whcz<^5 zK@i&vx;#`_0fwx3G5mPI)|Y_DD4Te5^~eSCL3>cUapQNBY>b8xsDsXu_f2tK8;jpM zvBR0l_(6ceh23Z^+UyAII0bDTWYeUHL>CHQCoF)jJx4@ADRCoH{|#&9P5xwIR+Qw%H8413DX_>4!lt?R-`-A`o zQ*{RgnRpQE54k!`g!dUyicDx_&_~H+X_JuytaL6QvkXmFh94Nb{}vt-)tNwj9;Jr> zmcbZCjp)yWx$Jcd8bIrwhJ>&>MuU#J*#@Q$!T1s&5>tqo{~gheGQie1wWgzt{`N1nRe3RuFR+{xz0GCFvW2#AI0*=&+9gby!7xKi+yK~n}<@FPJ zs{ZtPkmeim6dC`W*8nxV?x!@km*BxZ=`c)+`qO zyXz))wy1CRI`JQ;;LT{Q!$U=E;SuLk$^oVJqQkJ0UB5Yt$MY4{f%s+W>z1!4<0tLo z=zY)B*n!wc%yv~NNeCOFQ4Fz?yRe&+)$)o25t9^@9}@{fE#H_xMS_AzZ#vNB7K{Q_0m+RT5gq1Yda#Ey=R1ds?Gm|D2?Yktcb7l+nB|AGl^fsGUO-`*$+ z4wIEycoI98wWkVV#CO4803^WG(KzY~^s4|Md~wAe8#LAMF~cb`4sFPbo1_<_GetoA z>z|yBRaw%$=8A#7;}o~&&PYlMe9bV0`T;0?rGir(qizJbxKv>Mhv&mxa|{v^Myqon zUr>Mf#903vn{!80m|_zFf4R4hU;><$hvViVy64=`RTrOtby(qHUR1|u+Pmis?;n|J 
zMO)TRYQygAn^?-@=_0vnKW~mQQx@-FMwmT?(<_zcVSb=ffyV>yt}v|I4&3(L@X?5Jyd%D&;)6<4FvlIYNnawW5Iqv`=93`BFW$A$sm z-MCkh$5drs?luk!pKgKcx-^TGg!Ij$y?;iv*6wBe8+>_Do0H;t|G8>a{C)bTD_AOl zUqXixx0&YT{A@;}FGX%2^V?hmQ1d1>@aK@}{ns5tl@*O!S!ygT zM}D{69prG8m!Mf_tape&(PTbHL$~=8c_Qb}AWs*N*>w$LGJifR3vf=mFt^$C<8wac zQYp9MjeT@Vx0vZ*7O*#rPn;Iy+M~dUXbF18*u$O3)ds$gly*m!TH??7X5*eEe{?XPZ2%DQc0uEx&?egLuyAc)F3=N+c)58bxUxGbY|amP%cY^RgX-z$ah*lUg= zBTJjIG>gZ6ztSkR072lh+#Jdf~PltS9L;IYEVInhxQ>Lh82f#r=*LR7ldgvQ88A`rVW#%YEFX*{g9 zhYde58l86`(vM@!&v_lfVYU|yx$zP-v_~dzc<_>EO11F2e?0^77$>|AmHHZD4E~Ik zZ*A4dqjs*kLdSK6+{37YxNYMRuPUKIcM(|H%MfS@ws~#tPFrB_KXQ*lrlXcII{_y* zj%4%XyJ##=!MF-Ex`*_9l`A0+EuzOeFAqQaAHBvfLela?U3g$`8mTZfGtUEKNy32$ zJz5L@vFCy+nA-Z_!@%Pm6&|t`T0MW#i$<2yLm|`*rjBJa|H9I{_@`g=Wl`9$vZWNGgfdjFYKuxs#E+6Tih$ZI)tVBaK*gD9J`PTX5 zptCH$KaWwNTagqw%zDCCtn@W6+J3IQIMF&-ce3HY_iG?v(xI(=v1zWm=7njgdLolG z#|>q3!Gk!49U6uFqT@OXZn=I&HhH|ppd-0>z8ja?=+A8Hc3<@m1=ygkp8iSvTB%)- zqSjUR-hH^)`1pWN_qvxOy_JGGdm_weo(F}7Q-W1vG&xiWvC&p+*qVeN31JzMk>vhCE0%Py}OO{qZO{(gIHRposhaD7g!v2WLXaRvSH`!7iJ+ zxUu7go#`+y>&18s!vsD*IY-c*2tbkqr8)eq*_=QI7X9aq^;Mzi)ER*zU;b z@>XUIM@I}EkJ5H;_o3*%fA z3Eo05(~km&W0WzynM2%W@(FgHTFWYuWpJ#J^#H-}4l|@sx-P(6b{!Eez0^V!Fu<#? 
z3{F!gNXcad%2*6aWqAsM2Yc|U{Ngs}6|HQ3u0m@S6_XOeG?!mRx;up~-#0_r7?CJA z4|qET^%;fXP~BniQg-vQp8>*ePlNQW4C0L1H{z1R)pObNWetz=9ST+6nmXC!v{(Xz zLL5@n=`1f{>L1K**C&+SnwZl34TZ3iSEJAz9rAzK2o`Tol^Y1DUR1Vpd1vO_s zgho6^^i8XS%!;C|;k2B-s2iFC%%~V>U2}7wK->J$8Z>eaUKTdM1M6>42^eun=|E7> zIhTgXn(t5w?VF)QEXTCIo#}y4Sq7Q7*JiVDURQK&;3PJ9$XxU6iQw^AGubTzOTNmM zr)>#(@>i=8^;|;zdPww*e;W&-PmX(8?4UVKCu?n*{oV zet=;AhR92U6P%m*a@5`OhuJlsk7J_r^YyXcYGLqIhZA6vnDH>xjO_?q zCP5mciGzRgu8RlQF{Ql!>Nz2+M1Dk?{0=;JRoz`(r2HhjB!Id)S!p%6=$h0{>VL-c zc_qy}@RU3B3B8V~!V0WB(MBNZ80-a)Qc zLSvM*DI9HY#29$m8_Pky^4A(iJ zX!d*Z3N@>Fn-82oNhv4DenXGF2EWNfMSgx?+193A>>^3inZDwqYw~w8FZErTdZp{Z zY$`?)`3!2I|1Y1LW%qlpQzUeD@4F1f8kWlfg|~Lwg+FrE)z(wVi;Rnl&afAlqt$-xgfthO zNRkY);bY^E^p#Wp4HZ}O_F(_ceb3^3gxa3ZSn*_++qEynE_FH@x?8#t*9vSIZI45f z`}N@epA#C5JSU}1Wagwi^krXAKVEi&@2J<%j-|pon8zh<5q89e6mG!>OUH;iKmS8R zVZsLrg3g!qjhIhMc&ox-ud@Lta?ojUE!-Ui^RRM%xxQAbf2=F25z z2;O>Dq0xh=QXCA^(eyT5vHEuX_dJ;hq~X_?F_xFbp)Y+8OuWXal4r{4w3YW`^N6Ry z-gbxEC-7BSLX~L{gf5D;y?q3ICJ+18h)OSXx6+hQrbQi-)oTmgHpkcbu}2{vHk-`O zLyVL8jm-?zqG@O8axv=rei6~~xjBMykMwlzi^E(1Fa^G5s&DN`1GPp6hM(Kzds$1CYL9#A&7obA+of(|0AK2AMFpW zlGUE)sL&!?P<;Nq8zdx1Gg0~t1-))Cj`0W=lWwU=_)9HE1fyM)iB)q>5jWjTC9a}6 znX>MZ((IoiUN6CFxs#o1%8910hZJwG5h_1xp2=ria(#`uY{am_{IigSDUv|=T&1U zfXtB;-E-NvIbnH7`S#!)s$lb_LTKHxMxBM_4~G>jvQ$_3yidSJ-mn0Q0rcJVj|!ru zZ9{^f2TcA6O^$`y1flS0(6{|7jS3~&21rT7<2JmbvSiSxL}NBYP9&Gzjq`?_yctpw zC=2wgtN?pUuQ^*Dg|u9cKmlN){*LOJfx`i)DJX{=8(JKT9MVTtl=Xhzna!Y$b0~=; z!mJlNNBz$$M{Yw@p*tf0jrGzq{hX;hSm1oHE=FDw0|Ou`aeg8s7kiHoo9`OqqqjY} z=;#xAcm83kb}%Nr(Rppzef8Td$x1F-?cwfd8nxTCkd55WaTXruq1F47ouOc8z zF0lU*S?lkVj?J~qi_x}1NBm1fDQ{xOY?R~OFT-b%nL#)t2K-X1>9=lpe!AxN;iS%bJZ zXr6k#h<`^HbpS#~GM(zv4?l(tJnnckmSswD6AMyiNMMQ zb=a!_mSmO}yFS98c!3m?v;$3(68P3F>L@x@SQy%X>xkze^8wSxoz~MISFj%k%QIc6 zHv-&>#}+=nG{{KKwe(6{OS;T~+dRKUKl`8(Z`F=Kh)5U=OwSxbDvJQONyX0`B_^J8 z?bcWp_m%l8r#nUApl*AHHEbX+4EDE5M(XR-StB9E9{m6m?6Z@g7Rm|$y4|va*@Nx0 z6_uwDz~)D_J|e$HUf*jpUONJ4wb)elRv5W0HBWVO*CS0`ylURhZ=Ldrc1 
zm=wE*@7muBa#ElahB{y)2WZ2n{p4L=A2L#{J9rlk1(rK-FlPY_)T{0{dU2?XC%?!z zDu<6e`$+OQt#=u?R%R6B36>}5&j3rrQv&RcqlWjC>dNm2@41#nP*b!U-eeQ%$ui~% z(h(c#2OVuUzjlEI6V4E03#)t#?L*(wFSpEeKa=OmI7vMaX6fe7gt4WP5gM3qm5&~( zUHc2tmksLLP&3%injVseubU%mTjr_Y>Y;MfmhE+-{VsotL? zKB0|zIh6Vu+)wgd8Gqo~#SAZv+^Emey*Jxyn7uj|of^LYASh4u^Buy?k5^qc7}+Z& zde!`;RfXqyMaidgq1uwq0AvqNNUCAs1#I?)_xxem*xwkqgoL!A9gs5OQhBId0qi1@C$BDJubj-mwLKfB&a%FfT3)v;>50*XR`TR z9l`4B0W0zU(DMIUhj|z)gUn7%i|J=%j?#EC*A~n9PY?Bw?fX4CcY9c9bOkY~Eo-_` zBStIzG&veZ_d?9U=-nr4*h|9o1UlS@!To>899P9CG%V}aHELKh`Y_i0E-5^G<}k+T z<=0F%04sevE*Wk~wC|3)QsCb8;G#2XefWHbPqQdQAx44UN8zo#z7*lxaC&Ea>+kd; zuz+A~OzRP-fFm0*#_*oP4)*7ODul~#U~ zrSRb`W7>(ejluGTn;Wi*CJR>(<>OmNT2$YgZUTg@Ny@!~f`$?Ua-UlETWOrG{tX3yvy6}%)FVvKV z0_T2!$<)-JQY}F!;f;-b+_8!+^WmT36sG$R&rb`avYE4?6U7!te>0zklDTj`P_urv z{}@*1dXa5CV`v3=L!m!g1L{W)oybWgNrl$Q&3#d&oY3E4oxA>ijoKx*n9#$Uaxj3; z%qMFGDd5*k(%{&aDUC8`VD#VsUE2L+n`_GQKX~N#^8*HvA=Cq|MQYkEmHlx%>JmzS6F=1)aLj{3LNuxWct+hx3z)bc^pO zfFPv{q=kdb=k6;JgFf9gImUeAT6)SRBUuWyb@6S60A^lMMA5M&g?%$mZjF8S+V1BU ziL?TVvyOv{vX|HThdSC^ub6ZeK|8(M8BuZUle`b3w17S&bEyjV_WrAR%2#H_{DK(%q%f9TH0}-ObY7 zU9upJbf5S8ojGUz&N|~bjtJ`P^FGggU)Ky$p9Y~`f$?rSqgn;8+-!NcT0MELM#Vj& zYC+{}4ZA9k^wVV}Ec;?LN-=O0o;;hR(xjmCi%kn$U(Lf~8}PXAWDvdxnL#{|l{UJvc|#`iipPUs9EIGAOM_Ts+Z zd@L@*`_qBdjTwK8cD+gqHHgC_ZG|k)%b5J-z0LMWT0|Fv3wG=miB}(*+0)gZdhvy} zW)n{yx>^8l7FFt++HHoeSl+U?KD2wvjv)giwod1Nm^?ELajI&kpM%PJtl(Ey0?Z3? 
zp7Ncqv%SAMw1P!OeFm}_Qdc|+;C$Eb?L?b3^O$KXUY08GS875sV$9*7FXZWhp_X@PzTV)z>r`s+M&>^@N9a=^9iv*>d1|qmDA4d84_iumu`{5cd)l2cR=(0 zd%$I#w`>P_@N5Xbu!5q-;on~&+5pk2drfaEf&nwOJx~J@v-9@jmT0%5oDb3o!(@vf z3J$)b&C{a^ddK%W1tOs4&)s-lJAm^kdhb#L@&n{oaL)Jvg`JRaqj#nb>FcWUR=i4`6oCRpmC~B zR=|J%#zC&{vthqtq&A$TgBS@8R*Hxn)gU2{unk}ZH=$I&@l?qD4yI+AtEWO&h;6TZ zU7psZy^LFEh~7jqAWVoJ{=o@Q+ZAx}3dDHD?WA#n&C7Sxl8yqMmKa{ewb=aXy%u>TElgdO34w3{86s?4pP2$%U?@(u38k4@e{=lmF7X5Keiy}fuY$AG4ah_r0% zW1ur&t#Jh7kO#|c7q}l>)978k|Rd5tErZ6MPh{X z%~zL1Z6KJz-|>n^gL{m7%!0Nr_JE&wF6IUgXpmeiUyzilTx$^3HjZA%X`@Y3_3bPX z^uUnr9B4Qi0!*oJ=O>(v`t4k+r0; zf_CMO#rVXhy;1Lyg?7q|qotZ84epx0qCE>djZA>kIMohu31 zv0rJJJIvs{CAW2uV&rEnkZw>z4ACh) zf@03pg^bf zRfqGAw6EAd6NI=g2~UmddIBzgCf$)rA7fNw#~!wQ4=vqk=T8arTbpqF1~j*p z_c9VFZaLTA$!urrmCAOK2lSP=$~Nx?qk6E>iuMpJAjWl{j=E=xwV-zl@NEy@JvANp zVeZA1_d~B{oO~YM`uG_0C{Zn;ZT$4!^tK@p6{;ph%(Q(G#pap*Ao9r4StT})G$uhbd-GRR)SI&o$1p*tafQ$;%%sp-wuCn|!0Ex_o!>8w(8PQjeP|f#T4z<)(D9jcOQ*;w_lmx;&n^i7xH|!h_E)lNybpA9Khnje z&Urz6OY7|pEC5+PNOHx}bYi}AvB4d)q4LK~PgLS!ox(COvt=C)^xDT#7zwDE)#^9u z+^cO-JcZ+XM!ho8;n{L#Rn*__qUHbs+H05;WKKo|Daho5^E-_=8#Kv01x-Lec;3$tBq8}uo?1@ zunaxt1VOn&D2X^n9visJ$j*nH!AeT;VZZ)Nwf+>vGgBAQ12nPxN{4jrRDtMHQeGPqvoB*D z%|X`I?f4{O222~Vyou@D)DK^Lv5m=^sRc zobgW5OstCPiE@i?NaDQl|9-ehIHy6rIbD-JSktaA=n;1 zK&dOTp^3~92n2`U`8*vUclcc3R~ZTSGlNOQKErxHlkwqONAjI>aK2GiGdq80aQ`K) zpDuLJS++>-R*9c#cs09_;lnK5qhdsJ)Yl-BARB>{A5H`yD4&zi0oTcAZY^ibA19M> z_gwfN7tV260#8I?6Zsln$CoRNl$87T%%>Cx}|bj_JcT+Ps3QHdG8AWeUDvNp#lle&UrPEU~m+Enbkw`or<*f7m> z$Y17^5{~Oh0V52~`Nf683J|2kceY5^zOlZd7?Q|-#|>oY#ZUqI?0Cf?x|=nsuhDvZ zT@IklF^2-K!eP@eKmqvamZTa;j7R*QpKM5kXz~G@88>`d=zPdvNdHumuaIsvKP( zIB$%989#5MSg1cw_0h5z&#KlM*uJu$LjMkmtXN>=;5{&IXI~DxV&5Axb3Cpa^TFca z)tqI2H#@C2p3&^uVc}2|YyGhOH~5GGwzNyFThjMVyXctf;$GJtusW}$N2evvXfj(b zA9ImhEJFXu3{U2vr&;GHVr*^bmB^X<>(I_@phL(&MQEuN&8eWwwCJ?zCaGv*^m8lV z7acHxfqwuFzvYq$MfF_25G2ebrw1f1;Kr#NWQzIwjnO{onQ^0amwbGp3k}G0%INYM z1xQqTifGR%|5#?`A6}TWpAX8aG}cBIB6#;l&lUU6PgN85fM~u{6yelD4vskP8CNF{ 
z>(5fcVktrYQGt)lPgHhQ0ka3<|lU5?`&qh`nSyzs?!q zx{@L|xQtEv6)ny1bchias`NSy>Fd&Os@+AQ^nlS;L2nXIy>J@PYOj-x%0yH^>Er(eo0Jd_D>Zneu=RLzsdRK+VgYu zv;!G7he2+obfqHYY+Q?_t7T|E(vS#nseC}ewo)5I zi7i{q2iud>RjT90GA-qOF(?0Kukq?hmq>B}rj&k>IzRoSM*>T@?&mDB)rmRQq$+6r zrRja(Hz~WcHu|g9x-& zdp{8#t#jzO;!>{)QieB9z_uvyS1jd56K;kuDcrfcQmZ|y+LYa^sEdLA@Ot> z*2kh@&%+v8^caspD5Bo`t$nt?-g}rw>w-iMhcLN^sqbqR)-o8jp=0=Zf0h+T!p(M~ z@U%Nk-zBq9#d4|-fe-Rz=UNyI%4mld z9HY3d((|+{q|svli^v7wq3$g;Rpi$!sT5bS(}L72s|eu|rj%l~PAAp5e=s7FVw(Fj z1*tFG5KKR8a=N-tuSzAJrMI1vYHq<&DK(_~#UNa1h(T{VE8!fOKW974!=qBIMg-In z(ZYY2Z>i*e{FlNQDnTDz;5n4fs5^@sH{LL4>ll3VZ)tO_jdw6@g39ZdB5}f0K5Jzi zSZsPx+U;tKE`H%3;)xp{mC8hIcHR3NM{&B{ezTr*FS0c8d*I)nv5G`GE$lW)Sp{u7 z02!4;SG=s+nKD7#Y;P{??adGOkcj*dI$5K`Q?C6H^Mql3&~kmQPb!QYVXd*OMtiOz zBpay*sob)Gkp;}pk|($0s;SZjGAt&|z_Jo)CCe_J?|yRqnUXp`wO))CXHDBqWhgBD zU_~V~M$rzW>AzF9jXS8(y9Nv>A#n;Tu772U1KI}ktl|w5B*3lPFSfnzX35@$N3ou3 zi`)L3X6)%0xg6kcCbNL@(OHRNuP# zz^$?0wcV^D8{G*wZT5JvU%wmu;Q$5|LlWSt-cnn zO0`@j;gW=fq%zC4d0|;k$GOkN(|;UrzBES+?G1L4S$2AH|AvNdTJ?dy$c6Vn_mwG~ zBrZ8o)IRy>2D81RVSoDyA5c_N8G5|+6E+l6>_6PQIp6-mWjJ`fQ|GA?>2u>=2z2me zK`{lu4ECN2b7~8PcpxNvTQlU+S438vv)lO(kH2wuNBlhoUx^hn1N9C+M$Z zTY+CO+UDg|XoqMA63p-H;v%prgfEXH{^3C$$x6}6j<6r7Q#TM1dwa`!mOjB`q+Bj4|;FwGEP&iLQB*@%>HDW zuYgLf4jsX!=ywbU5G#?3oPBQj>`}N%(jF_%RtZlhM?9Y28+ltp$9t7xL<|mIj+7ld zAe{!SXP+^8?^z!bSi(gzp0qL@cTppu8QNdYdQlfAZan%-Rm@FYqmweO^}v0oN^T=i zTwfWubMd$Wy`uVx3=>oWfU{;6`k`ZK3xge%8BZ?HnPtsNdu1!qU+fw{30@UT{unW* zmWVq?|6b}QpVNjVdZ-Z;B`l~O==#xBK;FQa4?=SrzbyCiyLwr1CM?4%1vDVwkKn?j zlvd2o!J)FnAILhlP{&^+m}H#h-V=<1rG_gTZ6aZ{Ov9~waXMzJz#&`mba>~y)|q6P zb{Fu%9&FxGb^8Vx3CSwXQCG0apjERX!J0scNN|gVb;>`!U+`3bXwTn8R)kGuB@E)DMKrJFjYy9nBcQlEo_@1XTiBIBKBp-J#WLsSk<8NQKhMdu` zMB?cll1(?@Wc)2|RB`s{p#%;XPS2Zinaiz6IfK{XdVAZ84PN|pHuygAj*=x4&Y90v zh}ZV1A!9&HYv@ubl@IcPINm<+Muw*+bL$Ff<3eJWxLJorh7dsu^x>sa`U;)bQt-{JwfpP)Ou_K&?-JjK-y6h}D>mM={02@qqVXm9RMx^~rQc75#k81b&13lyVizB_Yd_iH{`%6! 
zP=8slpde$wsR?1i(pH48TXAt1a7kMMc9Ox^#lHgbw;7Ya z%G!`hX*kU_<1WqEP4-qp`-wW&2Wf7<$_rH9tru%>rHXR^Yz{0GYXa)sY$oSh$4G-S z&Vz0cT~)on^K7gg?!5B`FkQ9Vu2X%yyB-g^g9aLR{P}b@glnB`0LLyXqM)^Sb920O zvDpg&$Zds`QE1WstI1n@Yw~M|E*7Zw(=Jx~@M{iwn-7nw?sNJerC(Ep2pT-(Ne`@- zELSox)gOepC<^9!W)(3S*1jH|w~leSO3&9=dk@GCIiu!(l}w>YLHIuBF}WpnN+{N6 zTPT^Lf?ByW2(`fG&@5!)m842cnI$%rO7tjANZj_EO@UQ?3nQbdHVpU;@?yZiL;iOx z)lxq?jdFErbK8084+uL4K(yY)jj%+8=xziYr2h9`HM8LxUa9Eb4sA|;k`FOGQW*Iv zCYFmra7n7xHh>OKy*_3ZOyLD`rc7AKzKhOE3>S!@JTf+O#L#~9X(LiL!1gxr{;*+R3wsPsIX?)}z_Gw1(dQHPx-rI6n+}jsQ zmCdlw{gdZLnV?S1piMfiO4C!Go-?nek>Wzpc8=0h4CYVaHqEE~VB;5|T}O_ejRZv=u_bwAyIKD@tZ8>|KtjF;SX z{T6l-0Z6vexjp6xX4kb+CY;?hz^uiVaHD_!yJyh|ZjyVhbgRE={u&m(&J@!~4J%a_ z+5(WOK|bSSdw)=njxvM2*D}7ohS8t(*{_Kqkm?ckLztdxpDnk599ICf-z~9`?-dFV zyy}kzhM@x;l=Ze>)S6XBo3Rvh9PiJ3le42{TQ$~TTj#wR!m?4lzDxeUe8`ol-|tv2 zi?sRO<4#b~TVrpBe`!oi#;@{u5uoC&)-t7N1}@!_xDf|C#&J^v3s5i}7$ZpEjP2cpOC{NA|DFp*6@ zry9mF8{fMc3HM~N9g$W3WHZe=2Bx6&BU@m5u>QP`rV4geK#C_f_TkQK{vr7>ob%!| zOfM)Q`D~#}|70d^S@Ql|Es2AuYt-ECD0R&l;DUky2m-=Sa1!}ZA}D5wVS#{HVy?PX z3ar3km&*YjYZJ8*1kw?K9(zc*BZnoRbViJ_)}tgoh#s^}apGg6&}dcPdj4dH zT9%(0z|X_E++_;tF-7k5pks~N+IR&Mb^!Hk6W&RU==-aKhtOkL>#u{DETB!UrGGN(l^ z&WKG0m{KD~bJIoB^HfyuEI1QuAJO^V;t|UON_Hd^>l0IUFA7g3zb!mH)xa$FC8U5) zT89Ah1%X(8%y(HUC#AZ_vBQ!AHv6}%5D^lv=?g3*M^Q;hWGSvrWeL>eK zh5i0rLv*YAx)f;nX#AakqyngkW8hrrdcvA%{?K^A$+7rpjv)lbRViUY7QW!5Pnof^ z?>rM9SY;7sfsN9U&T%oLyOP$d8$W}PV{$waGtp7uT6W>q_4a5Q*Y;FJxYPRk94y z=+L^=Rb3jVSkh=yw+0AW;3pq{mzWK++NTM&$JW7g#P6^evUJCE9OK90fb*k&{sm{P zVZ^gIIh}!yw<6^tR3WrwVv+-1GLt4JCf^J^F-3@d3wbWCbn-aGPr8zFAY+#)8;{do zlAWD3fzJQG=+HH^>sIeh{1(ga`7c(-!#!a6Y72evVBgYQ{IH59SQOQ;v%;`52+6zk zx$w+f5h34eQ2sm!rEfa+ef7b_(rNFXPM;^W_;Q83Yz&T6?f{i1wN6Y)txy8q-#B*a z&s(i2A8xM|QBKyedz%SX&igy)(eA!sZiIa81YoWavosm74=Lx zV)i*cJ$1VDjcCPY5CeJ)t3Zq`Vc#up|5TNtD(1e5+JWi*Jd4=(dX|We3XB3vZq#qK zroq{)SySy&eq%>9v6v}Fr|H#Db>ou&&I2> zuMN_1aYy{ncFk80znO$S z3GL6`8+^0PA_;CKf{jkOLBBiT9O?7Dram!@h`(X$lWE=C*%7%opUQ6LZy5QsLl@HT zFhfXedt%BI!>8DHg0)j^n>D1^Va|F 
z0DvDR^}m_h{8aDk)w45P%?t*`{bugfX~0OXh`nL{Uc;U&v+YZVl;_7j-0ur z*7e#Q+bdy#7sq8u#lViCUgX~*9=?zqskYsrRoSp90^l(e9{kBy{8;f)3Bcdi@7R%H z(k6D>SsokRIivzEjPLyTJPZLUC;NI5K!T@w{GjC^kPVoq;elC~$7+LkNlZyyUC;K_ zTB4`f#eBV|kHlk%@}DS54ooHF_yn$riJ1WnGe^C;=pp)xD^l^wma9z}9M1xG+$M9` zYWot!(@%Wl@6}{x*Iq-<4F%C~@-cVDrmsy_-S6QPCNpdHEpH`Q_4w{vT1KMo!`)lW zv=t>J)i2)3$y3tDhxD%7f^GHwOxcYk{~cV-m=i>sVSo5Uf#f-5o0mQ;AF?EWf6Y)R zYVU%l&ZAIr=xtGkV1eKS`S~>NnDQV_3Ox9uN0IG;qC9j>!$+0jFmHO-vz_mS*GUiC z%ok^3z0X)y7UZFW8UzHl_-r`^JWZ?qWWh`paRZ`=@7l~WpQyRQ0X?wr#ubfHHceZK z=%4I6IiqXvB_{Qhup!qJ*{6i-JDY^D&8^&Y7X)&9gMo0oSuQ>L=S@1YbM9r7B+U z=Y~X4|K7eU_}b=|9GC)_pugw_Wh(a+JlrHuPzPTY3V9};zuNGZ)0uZrAQVUmoTDrS*fyonx<94`n!a{)a5U%x|LF5LvE$spCY z?h2`oG#zn(;F{1~eP~p8q{>iCvZY*zXBYcqHBO@M3iq>INH`t}DycDmbSDeTz%z*e zb;hKU=S4L8fdGDdVJuD$PYK8Swkr)!AIWQwB1jb!)7$Gq$fm~%DP^$`HD&$-Hs_EW zKPM#`6n6ZFN-*CdPtFum`b__=LBH(atAO(F8VNJRIMA=O-qePfQPuKc(-%x1jIC>1-*AqPSnAn5Vi%IA53!HOU5{)uwJm-3`MilvKdZPZ*XO0DE8;Z0l z>p<0zEuk;im8kf=+7|^dlz#TRpzdoZ5#h@lVoQV_p)K7q$St-lQ%vmee=?xz8MQwm z^z}>;Jj&;KNj30~7M=JWr!X40=Eq9u&u{+#iTWx7EoPfB1D{Z*TcCdQDpFpn7uQGX znuypa(8#G|Sxv83@3xb%w^di zsSgF;NafacOvmn1H8P>zuYMFJauuNal*XZx)L(NvBwDKNo^t2)mh16{kKayg53gj0 zn3XGw2^fh$P}KLwTUwqKD_qTV`!e5J)kocK2+?;rA?C-r%Yh4+Lpn}*Dzv1Khj!rw z%h+zVGo=4Lnv54~QWshJp7yxjz|U%goyH3q1^Qg`C-;YAfwFt;4jJmzWWODv&z%M- zZdf(!CIDd`YOeQCM=`V6an)fC8gc&lGxd1MV?N;-z_~sFlQ9vg7~J-k^F-HnadI21 z?fr$}umA>!-#N?555d;QJrV?lo@D?o4@QuYk=X&l@-|6hr~T^1N>7dYqZ&^Z^~3LH z?H4MuB)~mjrY|4@Yqv*ljHF7W-cDq4vQZ=3&}O8ix*@oyX0E(B_i=TpCS2bWi}t-Z zeN}CJJvepD7e0IE_R`5vZk*O^wzj&;!hCBM%JC8p8F(=w4AO%5fcAl;{3*e5Dfiyl zM3Y!s*ZZbg&_YJ2}_G6o17CRs6G9HNE zc<4BNj^|Es20lvd|C!UgcNy+zZ?pRNZqLJ0ZvW{ZaN$cN;_t-jnH1w4HBuMHv-PC` zm`amV3C|;gfLyu}=iNcmcc?xw10F#MZ%6|4Qk3andy<8p@88n1k*0{?Y!44r$fDgM zkwH5=-NXR-_^G;Sro$TX3-Q=Q|jqhUM}Zgn%@Qu~6s z11+n~4()t6nhD%lD1+6#HEI5g(&}Fcz22xrb=sOa=06f7VQ_E9*@$1*g6wf3efNz~ zr?&4o0Rr6L-KonnxL=XOv0%fc22f80TRS}t!klBn$e-c7CL7CSPEW>2`Ww_y?efyw z+m*i~F7J?3^DLdV3Yk|m<9i#-3#z@3H2_Q(QY}D0{Ud(GY4%xulyvFMjC6hPeh1cM 
zr>__0fd(?2_xzksUvR62nx`O%t8aD4m(x{WOM!CjwYqd&uicU%|8G8c z>}k}O;0zPbiFDHS_V>UgG)8&2Rhy;sMeOSzFA^Dups7N15V}`dYwGYe)tJF#Fz*zg z;;9GLTj~a@)5uvFf_Vmr2xfX9)Vqw<*UPcG#w?{g;pZ%u`}!E|x`dyU;h!9UWES~| zTkO9A17Fm-D#yUSSRiy2NhiqbqCtLujdeT)jhv0m1aycZHYwx52lOs^e#qo?klm05 zx2(y$?1D==U!HH`-eegMM;XQ~j%7WhlMSR{-&jJsDZgT|X8viy`rbGaJ$~$xFLmKD zSw56A+0x&-zcfTuaG%n>8aY<&rz&=&&HVza1eiu zE6TOJ|H?vqvTc~PgIOcdKSLc}{dEFBUQN%Dd^{H&KD1G5#$ngPUq{V>p!gm0^V+zt zYP*55hQl0IlAgPGeR7s+Etp}}YKbZ-qfWXRWlXirU7ZlJP8wRqBe4a+nu}`r)UWIG z!H5#rW6iYnL)en5>t}%>tOcI}pvk-b9d`PljvP9yB|bGgr&=!PioYNU^dCQZIF~L1 z&K@PiU*$Te^ft2+rMmU1l}Z2+6QIg}?siGq($1$iib@VMNc>UFs)pc&K_rzO6l$Vg zf_tQ&X>w{;H9zfs-AshAS6eG(s0XmR3ep#;?yMy39RaB6^$d0#Z#Av9M&pAy`*w2^i-f7kRs1=p=b_tT(?QM5whS|b{S?ugbza1G!ya}0UPp5d@ zs@->ielV2E(6U5f4FL>mX{8I7MzjJr@_bu662#IV?|R0_ z?&E<82;w!@qhkZ{@l|q6IK&$LD)t6F7xcN&ipmsz1m1r|p)aih{8Mvz^0;fP~pb?I?+Y zr@Yh1bLpd7l7KyD)YnSAxsFO>KVt$RTp#BY>4Xp>QdB7Xs!h3u7X{$jJemu?sJl#< zE4|6gphDB&(Re%%Iq}EvpP;VA zNE0xjNl#XK$sR1d1z7%p43)O$PcI54kCt~g!1;;oG3K=6<&C4E<>v0iM`1Z-l~)R+ zkEt~LJEGZQiUYL+)}GyhbVOCZpW!155MlW9#pUNVulrY3RK?p#M+9f0{;3gK%GBFe zt`cw!Vei!Z1b)^lHmah&osm7|vtvFZNX;$uGAr9%d9Ev7jjS=zW#NyYUjk{FQ>t;i z&F1AFeq@-nMzUN6VNl!3mTaELm4UAGMt=b+$dmDDvVotC38}FscrI!gT@Vy+h8E|D zv{UqX^vW3ggs(P@pLQ2YaQb=6P`_|?>b-*uEvG0-B#4u1fl&BY{k!*T-C07MO>W#2 zQ+Qf<1Vlx=vvmb=ZGIrmEiGJfH>^f0dDaKjg&-nn83UZppHo#y$iWrRIL&%a`?RXt zpjT-LSNBBhp(BpqZlqF8ZMy)LZjMGr+&6l!I9N-(V!WbC*?8?iF|1Ch$AlCaZM|CT zWOFqQo!b=+vv^#Dmo<54$|)4n^+ zo1cJqK<^MxFvBQ={>Z*t?Vx?wLXVzNKV?j4kl6kzKW^HvLs7!j!Eg%IbLMQKA`49* z$3y$NzFoF38>V|fVvB8t7=b7eLthW)z~+iFHL4&BX-(0fEbr20EdhDN{#Y!DLL76B zCgRC+E`p@0q%IKP%Gjr_Ofl`HTn&t|PmfQ4*9Qae|8xKW&BzJ}pw0GRcWm|(5Op~Ao4w7wJse78n}yj@5TRm9YL^?dF0uU zV1QGuhBfVf-zbGFEa2}fEG!0o@jH*wSk#!~>e~Hc2_7Xx_4c>S(W4(p`{vPNSya6D zuwZal{;=+wvR+oBv72)(N?iSA@NVBPsAuQ0dNIZyCrW_9)`0KH2VZkj!OD)N(LCNI z*5pJH_BYew^snSiiSfP2=I(i~xAQ-4-Rt+Rhq`S>^DQago>=pkjOE+>mF7386gqE+ z=Z?GwxN_1)zAIZOyBO85$MJM6e%jPP*pZnQn?uN)3E<2r6Xq7x8fW*l?^@Kf+l|NA 
zdY|`ejwz!#we-!^^|huOLsCh|#ESxLW`QD)eas8m^fR}(N*Yg@&xarE%$RURxWHCz zLBS5H`=UY1H&t^k`&f`*Y5J4*u#K)af!Ygq=&<%PcK#K2FC@2U>>rRMDDDimB2auV zXYvJ!yr9KBw-P?L6&lzR+IeBEr|y^zI_B_G2VuoTD3S)f0mv2XibXzD4(a*?a=^A^%Y@C+=&-wNiB z^TLwZK%{r)56Ca)03lD<0Q-6u{X43e2B88#UbX-6hDN$dD3+Chz30j1ZT_in&)CBR zuJgrf?3S?nm+8~V9)y^c(Z%@4j0xI zAF@}Qg9ew-)R>gY@7T$@w&&2w(g0PZy&OHVRP!7u2!z)IZtFAOCL!rZ>sv5du=x#ucR~Y zEV!>La>CSGJ3>|#fSih*knqIOe?WYw*ByU(x&Sb(VRzpP1Xr8N;gqM(5_l&o_DmU! zoF7-3FE+X`HTVi(wi09Z|2@9I{p(6G1X@V&6u~{Qyfz*f1}(Nayy;kJ#SCv`?fXHN zWBRY_J?V`#V$5IQW`)|&6(M>BWV*;Mt@W@ay=VKYv)wrJJq()HRSuR(`Krk*Vork6 zfI?);EJ1ifck1g{PIGPB(VRsrjN@8AQS4@JWguU;>k_N)DZD_woZ1s3VW=Co37Owy*_4+e7G<>um$v>EJ__8NOB` zJIvomIgztgw8ZzMLt2|B^F)*Sn(6=L8MLojsWUJv%j<=Q#qF=YtD4Q60UhtQnGs0r zWLl~AQe6$3%T!G3OjYN%$86oUSus2<&rQT)WBqB$CRm(d#9RKUd{fUB%jmukPL-qsT8S_|0>_mPwEF}G>Z4XXH{uaG9xUXdUXfPXn6;* zs@ew#Ne8t4{mYChc|{0j#nVZNtueE^knX&1L|@r;@Z15GM`qc(xqhOAVt}~Hu1Bl4G61LNq*Z--M6qsfy*M_f+2ZUb2f}pR{90Rz|k{Gn0|!Tatq&y zQClIO@d$%ZRXdZ_$Zv6X1hsWF4n2s6gp;+%b(z+3oWg0)1m}Um9ltJW9KDrdDPd&j zf62471Wq~=C=s?EpmcNA9lVM`A@GCbt70+5V9Ev_ zGP*#X(s51dhZn;*|60rw%Z1*nOD0d%v9i0MM!+GRw9FWT#c>Uky^)G|E)QU@Sk(TU!SQP8a8x%dILf5Q5b>M9DREXX#GEOXNH(YFK6Rmx{em<{8@ur| zrRR9@Z46fvRJu3*e^Q*xsq_3^lw?*k_Ac9_fGWzK+*=>hy93xM2A0J_z6qU9gD zn?hReykme=xl85y=T1A2b74>917`8G-ib`0;A!B5#JrlV-O}NbV~W|$Gb?FL-ye?m z9NG1LByp6xhuz-mcAHP%oFB95b5f`qC8v(u-v=a)oniYWw&`U6s$K5wrQ9nC_J=YEKks%F!cH}AoetbJVJ zrL^gjxHI^rWCXfwTs_C8w^Vy&|aqt`KiIOLC4WSd9fa~j2|+fLB}v`XlhF5 z%<~qZTdQ}I#06`eguM)_-XnqD0~#`<=L6~pgaxHnFRm?=M1tHwG5pWMi%13q2$Y?GO=>{dwAZ6lf(v)|p2Y-Z$1~I(uKJr^nMToDENOlGeGD;wxxxOtR!}B=?oUHILZP*JgUqB!921PCkf4 z>0A0iN+EZaFlO(RsG?!4ZDQ?R&mqrgWMPE28g44(s(;XKexd5h> zkx8>WLeT9X7qC@B=IJM?cx(uZl(Oz~{;}i;y2&lPE!_1z^Lu_N{P>&xvdMYpdB!$X z^j4Hnqa*<`u2*N3cRlK3=n5b01{QnZEQMU}zti;f&d;@8C7}KP8kpg>y1_PAZ>6I4 zZHFjU9}$t3+h?A$5`+Z>b&b6BphS9JMt)Dn?4H*LsJ9_&eQhER`+_eV-+i2sxZU}8 zyf0x0d%7QTWv{ogZOHiS3iZA@(wU-WYu$UhIZKS> z*N8$pphwYOl3Rx^Xn77sv;z9~t4$IAnZmY* 
z?1kHu1J%&`UvTbkaXmpy%W?W0$#9i)ndX)$`Yv8E|c*t&UT59Y~o~d%8HXWG#a8 zG3H12qmMD9O-Fk0fRiTwb!BGA6ElP9X?`Amv-A5P<&)16CBQ);WXD|rO~OZM`>cVz z+H&n1#p=Dlwc{NE^Im+{ukMQf?wB$AV3Hj%+pwvpH@QX+F8fn8`~dwc5c-s1`WR3| zI#c{Q?5c!q;m~9{VLnx`hM*{BKjb`E*Lr=Yv{%=3J%ml0zn&egydUX|m_8y*#8ItE zOTm;V=}HRVC^+4dhJwVgC?^ZNL@zb8GjR+ub|v?Unx{GfU&DYOO9K!vD=l`^<@vEl z&HCfXng)u-vhW5vxNg9?Umdk58RZICjmSBp1$&ykHLx$F?aEbNPpK|RV0o^%ho4%I zcRU_D_Ft|U!T4zenV z24t9Hl}r3Bj-nk)Lew(X<3CfQC0i{_sKH$s48{{-&WrU9F@08KW*~(BUSLvoN+wRs zjATsi+|)@A&LmiHwfoj56S*e_je3st$v7DCl{Pr#psZeq$ zQt>oVV(~ZnKPiXEBVcjbF(UrfWz(mz>#cu-b>$p6FBTLs0{uuZ$cCAba}Bm@cW?gU7J z2X}|y7Tn$49YV0dWpF3B2X`Ob-S>Ll@2_1oryRhd7R}w=R|A3i@Mj!%AHwk^SJVU)#e6rIN3D4;^|qRKOV3W_xb$QIDBhmOf^zlhx*l?AJC`BaUaC zbe7aZqks`PREG96!Ufn!z*yRYZ?7y?>)yS#RQ+7mGfzBf#psRBsz>WI2%=9$iBUyq zRXwa=$8@5|^r<(d`E|?R*;hWo7mG{460&zEv*B+2c_o%9a4&opRsSWsv3I~d^Liak zA$z%elWN{Adu~xhLK%PTe2bXi=dpuVA>!;9T8{?iBCTqB3l9pc4O}-$zUjo?&f;JP zr%CK9vj85*iQFgX@3=M>?5F2K%s@}Wd;Nt!C!~7}6Vs^^leM(y&FZR#46?NE)=TmY zvbid1OgqhEnR_d}GymcB>q0+tl{*XAAJd)91`T|3Un}O`+UlTgnl9t=X&ABfb2!*? 
zYr7$b?SGCif5|Yv#DCjm>D*-LeEE36;q$O7-R6@WA$_;yAn;0WtJ4{lIWJ+)@Q7;L zrxUOLHUBFx%s#wH2-uW{j%`^$gDlbH`JNtML{Oo-b?@$cq;>E9VU^+e{19cW?Q37z z9_GdIH`JV>HDJTB@nT1Kvm1bG#4I$i3d*#{(gG=erQr{nPbs5TD&2Rs#GVI#fpTLM zN^kUj*$@5cmzC(wB1HkoEf+7k(@$UM=;V`L);2%~BmCT#3Bc_6nZ4Iv$xmPO)RG;p zA@9OPzvy2`$o`BQ^_JVTPpS`A@@AYb=n267;DX(V^PAIlJ*@jf_)aU2$yrTUyAM8g zes>r$rkKfJ(=Tj2EL6vqSUmuo;yGc)AG!TF^t{YJEdFR-g5@LQU%xF@up<|82}%U| zJvG2bF0#3jqz{8b(OkU&NT^I=`1Rr@d>F|Wr~&w}Bu5rNgsUImu;-UITAH7R4P)8J zn1LTG2jH)RcL5mQ9Rx*hJW0jC$bx_aN zudaK$kZVd-d6mAJw$7cGF=JKMGHU1gamlX$Nuo|ZiLIsv8#yxK%Q?jUUcaM=FS0$L zqHAG?sR*Czir}ei7)a@hiq&Ld1`_-u=o;Y$hcFocL!%6&P=}0+G|@$3pbtrLZ0AS- z+`1}U0CpbrKR9EAF+Jh)^Xs5k1W;=$E-{IQwdx)5S6O^Ef3}l+lqfkx3)$eBGFW37 zvMo+~YrI}5B}NDThX`POQA@F|b4dXg2o`Y10W?@57D(pt(7=GM@H>6rHHjFdT4RZV zP>T4D15Lv;ZJX1e+-8n-ZU)t%a6R-qSuw>u#%^$evaBVoYaYP1dAZxBFPED`4Fr~v zW${hwqFGaq=;*ks(;zSvW^%zLT24{N>hp-M*NSV@K2j{HZr<4=ro` zbpt~Ur2P#>3~w^Qz%YUWRE*|lp(=MpFpASej!@)T=bIkP#$1IqJ|fQkcm~&dTv~aR zWdki`Z6`L|7A+^XY2tKLWAP5gOUV$mCEq#zOuzWsUX@3jQJ=?yV$>);b&s;+1ZtUA z)iP^Xd#l%N0BdhBltBH4cmC>~V+!?xY5^MW%X)|RtZ=$}ZZn3Hs&clB{li(g{Hu2G z84@`8!6TVY>Ej94Ge@_%WS<9+Cd1!vu$sq!Lqwa*>I`b};#P4|rc{vGTQ3W2=Fa`_ zba%X{Y|~K6T2%WSJ6DIePO`%A*buUt7I(SW>-t&=nA1Hg){2VtJC11t93gH<@xX7-y zSOeT5L4q>#+wQH4lO3;jw-uo1L~XOJ!^NsN`*6>;sGxnUzd=udumx3j%me*}+ttt! 
zB@KRaWV6_#U_fvgYusrOQ+dwzh2=obEuPr-#g}&*Oj)7RQfZT&Y*N=H{*K>I!=`C9Z&rqyIfwsZXW&T2^1#FCUuD_k@M0ca--p~;n zVd&jxd|EV|fVsh0$~Q3cTB}2w`1UGEztP^~-K3j<;l;4wLW>dLRg%Yyjf5EXZUMAa zdx|+0vT}l16|yRALFA)Uw3O{`jYgBW9^?Lu;9SF-9vHBoUY+RH$m>#r%M@0*CMRhm zVNCzdqVbgH&R5POAw`>#vZ;_3P4gs+1%f^+9?5+A8^xrqH1|lrvYcv>+f;!tXVK5k zuJj==%uB3YLgE_5<#tjBl`H8TQ`uZ*Vod3&if0yO)Wzp;@D23Azw6b-*J_jF^&j!U zis-=%YSQuFXO-Htn37ZsbdadQvrIEIE*5qX5TapFoIH#n}ssE(EJq2W^*Di z8BV}oJhrgQ#%;dbNr-d4+!J5Cl$@$^eSf^d`61)|f^%7bksTqd5jc|ik6bTm)Wkm* zkAfZ}*8uh4%=&n}s$p$#q^?4hf0>RGUk6SlZ5p9lMa`dv5@7Ww8g}8GE<#mM?Wl;C zC7hswmf@b|0o6;Xb&9YGn3hJ{5Nj5Czw2|!fcoLv<{1T+Wk+y z)m%TgFLU#(4gIxJV2AHSvGwPe_HkQZ&~e|{v(zHhX5C8JF^gypQ=qZ8sW6ZVk8R)S zKxvq^t|#(3WzTI4ijBHa9%Qt+9LZs$%;5Lb0qk0Rzt=#30y z$=i3&w0`&^hZCtOJ}=vKZULB~o+^>f#K#XlZcn5Wxin?y*Cz`jO+@%ne+LaJ&ieJ@ z^)0d8eBKE3gunl5`x|@uck9~^#x>z@{S$V%o5ef|7q_OUJ|0haA}{B#XFJa~BS1dw zT9{0vB~bb^&xJ<6NUJYWtg`Lroput!n9w`hZJ&tIRHJfm`rWk;V5`l9(w z-4t~4$OA&ELL}N-Bo69$IO}qMn4FWYTLUs{=4Wd8xdlP!H+Cu zJ-osnzr<8g$R948%Y>!gE`-StW7q3+5c`E$`D@Wm#_$W{xZ3`3&hX0&XA6ss7Ojs8 zM{6yKyZ6{pt>~>n&~^kN4MGeM`sc-e3dm{iilz&Qo}{3&7DdcyGgVjV{Bc}RRqawM zWV@onMa2b`sXABt|Fx$5igDX0s%t@oOf$*_9B{f3yZ$9)7-)!6*nUWW;J^?!i3V@zV|ERsrW6|WGQzKtS$b{ z5k@2h)RfDNnnw9|qXTIQS+3fe{Dv+g88;jP8Mh(+93S{$74oNV_(a!8S;%$kW5Ps66Ze?AS+t31C0F{FLU`^3s+xWN*uiEE? 
zs#swhHLQ``Oi&HR(#&@-%4kXROMGx6^2m<2wT1fwWWlP_9z1q4#?4{cQ2nOIqig5h zDW^+|v00tQ8l*ino=(-*y%juNfbF^cZeN_8I{!)MM5^kRPlVgF*tS6a-P)kTjoU<6RcyxH%fDZA5zc+|_mA_|!-Ck_f znR1y^->*BT;~o@jB=fo_CjsQ`|9#%nBz=5euD3}9y%PXI)f?x<0Y4Qld*zCk|2nYY z>_XD9%#AR>6V*G!gzMmPgyu%7fl*~Q*Yz}M^riV!;>RC>(~*Vq5kmXE7lyz34*rd2 zcf6i%)RoOX!C`s0$iJRY^M%(bq2TvhJ%U)c(7-y{hK*(2D{T8$2KXjlxY(VBFBgjm+#whf`$LAfyfV zM`U@@6+e!F2E&0{mh-+HVGaILLN(cw@aU_M?`Ylqd_Y^|dQF2rAC7zU6o6a|biXZ_ ztu$>#w_ZTs)C#Bqv2b=^_0OgkI(B6k(}eKqXs>&N#7@|1$(c$>j5Q{fK>sR;LtLx; zp&7WstH{VeW;UMJ#XHN4i=pDsa}Eva zoKBu=Lj`}T3()fO3L6)4wP@6T{^EY%N%Ny(u^a^z`6qm^hJ;Bwk3 zVgWzv-ZhA^gWIx-SGyBI%xOkwfhY7cop8N*N`JR5$G_texu}8DXdul00t)UEm{Zgg zgwIStkM{rp;wvOtCIs|k5snUo^rs^>7wArc`cR_^&mIe*X&Zn+g$}Y(w2{pd#A6_c z?ul~Wp7dP-)gSVJAObgH$=s_-rKW@`)^pgq#0^sEOJj;fYQ{-r^Ju#yjp7GV6><9G zAIfGV*T6SQC0cNJB`VStCGXY1(ZJ#mg?!y0dY@xcTE7Ix^bFH)gdgGn5Owfx_AYD3!#K4dxWeR=hYK_uM+ zmL2J1oc8<@jWKu^%q5Bx*bnctUiEu?=~!Qp$-Nolr(h#FOr`oqH7~xU91kX}k}aM@ zC-9cF3$b(PJ3tu+6Sznmhcg~fp%Sm+?we^mp;}5jc6zwLFhKA~>CL(8DuzCa9SZsU z2gf$JS6dv%)4dk-j!}It!$|*Bb8h-cQ(+@|muN@V4=bN^m>|nT5PND?vn(Cz6RQn* z84GY^1w~SqMvvveabdN_l8MOZ!I6U_wcz;h^ImjqO2a=rTD5QGK@$hF=kX6euZ|T! zGFd?QRc5kcfinc{*i}uGn5qwd(gwMo`p9@p-i=p zV<(Mx;i&L={+r97Hw-XwClC%=C2XKCzja+j5h>-mj0RYo zX?%LCI$xs5eLV`|?u%Wk&eaZ!V-wH!^+?I6`!->m{^GPr>P!7N#urSun)##A=HrF= z-;Onhc<>H<+qZTG61SjrAD>WtU-!_)DzeR+t0%W7c6^aGp9u8{vW*9-#F6{>Yv0>* zW<8;fSi)H4^9@_r1UsF1uf8Dw>LcI|~UAg?{Rw>hw#%0f_ZOKEHpF_3W4g zi1)OX7=+6JktGnWX(x{n8f#2s6J4hY95x}d_eYk}LtbXUs?-NeV|z$y2R?c50`dWg zJ5jijx&sF{+LWOfL17JfWw09+4LkhBR6P@oL6n&Fue2pOWl2&Zmv4p+vMOm`O27a? 
zOg9Nx>dQ|oaePGFcheqA+7%^$<(Oa7*mH2s;N!R|_Xt3K%X)4PCAEGiqsW$7$S`@Q zjoklumtVucjL3V$=3fg_iHJ(_)hcPgk|*1Y#RjI&!cs4WYJnP_dmh%iqAShGla3SP zyu&ZjXRcZ37c*QbN>KXp0&cvz7gqk{0u_@TTUBs~B9q)C<5Tm#H43acw#8?-_nyMx#c|STh1hrnSci)(E)GbwB^5dER8Y z@*JsYB=tPtf=jBHTo?;=a_^l5c3Nn}VOGVv)Ci-<7L9-bK#VEj zO>1Rrmy4#c-g^<#H6X$SGy|Jb2S9uXrNkMXugGJuS9i_nb*+iA>iujP*wOS)Xp+U` z?KuPHTe_6mc#36!LFRxIIcJ zl@>F9&4jdcteBiKyz8=^G;c?Q4nZ89^hM5Drd9v$l)kscGzz~zaz@x~6Il#(CJTYx zzTSIWVeIA}soQ)$%ddvnIWBmqm^aC;7q%dnDauvCT%R&tZ9MIJXABp)>t3BppIOS9 zBK(+DG4WD_4q67%PQ&_4Dkg~V@Ff||01*sKQ>4nRI{I+6r+#y`9vJ%e6)WH$YUX!U zdJ`1VQlkcN(Hq*YqK_$Y33IbS2SK?ib|PN)=V$b~=}7lGLy4#sB5%)50EHj16=-Vk zta}x>m!+Gh&n>?V3y##67HvHNJ|n7jvj`L$s4zp86sL!?=&umbiAzwQCZK&#r{5LV z>kKMnPTtT|=pJ&VTpzo?)6?zA&5dElu83AI@iUICxq0yu)pZV!-P&8kVcOjOfs^x9 z`-l)a02W?uv{MfjS-<-4DEJ{bw@k171$!-@ht0N^s6N;GU7$LEd1$AKe|#Q8`o6iI zO78oJK*=@)>b#KUH+bgKL5cExnEf|WexVQ3|1CDh_(sWBcHtKRiD^vSf=S!MCanzS zf0pNepJd<8o;L;GD|Y;EjXhW-r!i^vnDB^X!AC>R$ZgfQpo>ZFVpndIE^R zb-w#_;>qakA0_X3pE;RdhtMc%D*@7o>wt8244|)+n?SCWi3bgUA1ny zTT_W=b&8j)vwcCxmHw56DW9_t=`>@Y{pYw+5Z!2v-b_Ck0^4MO&x69)1z(sOl z>{*%!H9YEZ08BLuFiqbFVQmJHEFBL3iecuI#Pab>@okh3&V7( z%$@SuPMcJsRi!H|XOV8YS9Ph8N(zb99MKjA?d>W%TR_mkRLy0=M%C9xqQ-{DdMYN7%mUa&c(80`ecc#y}8Whf& z?1&NKA0k~P*^`K+c`zlD{$)X9Q&IcdW+dw83NIXZe%LQUl_T%_O;m{*6GsLQQ&@AN zz}805w|kEPG}08_TB`;n!n5o^2~YFQ!L>!+HctW!>sreb@GtO4;`kS<}6#ysji6=*Hzlq`T5=L4Kut4pu3tIi~3G)I)ujq8D7*$Ei}M%kPR$et~7c$`6RCd2#kkKTo7Hf& zOMDb0SNJ2YOXWYzhipR`7i))HWdwG{w}S$LuksDz1?e;fu_2ZzeKX-N_FI|Bh`;C= z*XV1|*Mf4GRYuu`;rmq44!9%KTWVXfJU(3~(&7d$YiS|O0k%wI8ybLcT zAT^CqJ`=`@R8wZv9fx~0 z=yo~15xqAxvI77ga|XsOM}$iT0Yg;m(e{g<-~>gZE^@*i=?3G3n(B~L`Z4W9K*}6A z4%+4@+sAd+-8M=S%8@Iyw~inJ&Mu6d)my&V@i^vd`XG@)_7$$*Y5uIc)Q*QiktWDh zg!1kqcAGg&!aum}AARRPuTY<_=b1_?s_%JT#LZRl-)@f-?hp1F-EJ-^*93k2$kzmW z>x%ErDfkF5&-ypmW}R^`Egrn6U}Fn?UeB3xzAi5p|9jgt7NkP{8l&!f!D5#KF66S| zTLa)e$)*!zs{pGD;ADJ5)hnVY`}nZC?a6*i#L?MnAc3 zsc6blU#tdwU1`Ox;7dCD8@zeooqp$ldP+-8CtH-jEAR77Nl5mq?D9d=paG<^C?hQ| 
zZ5}xW*Nu$mx8-&wTYgiSd+&(3XG)eRk$y*ti(T$_pVRgV-#wZRJT?q}w%GMO zMkqP%J~@p_)4;0tP+_?RSKp(0NmO?2{lj-d(ho83k__$%YS!~nc|io@LujuEMw+=? zYq<1l@Ci8ZK-!EY01gb;L7~~)+-5M$W7~5{%u9x#z=e9l!foP;{*+C2;3u&p8nLHh zOtHT>4=5|9UHP;k1jOK}2UL)755Ft!mVMVZZ&1<&CPA|L2_S6wY%n!tbuo248`p6_ znWy}LycIQq8ad2YKX=+nXTE$_+_a`xpsigHi}G!b?1xgB!M`p!hEdJ|wVxKjmGBuU zPBv64pWuNpY^I7L3qBh^#t9VX^*|X0PUwZF0pkl}*8+brOx{`G5pEK(z@dO!k3*I^ z)cw!Vh!5kHwpD;-9ZSwgR?zSqq)-=zYd)!?McSP}keebE$l#mb&I)PV=1+)1tF^vq zS5#JX-HZ4e+ycXNUQp*FVqa3T&XyEs`QW{jwy}SeE=yhnv5D6Ues;#vV>g_HoVh}0 z-%c0*F9o%0hyQAw2b)N529`&Wk0s>`X!pDWt8z0H6EbN95pkFq02EeK){3swxOBht zVKs4m=?9#@wJLxmtaoM-w_Y;EO<}*Q3=EW55>)B_ z@omIwQlWrN$OEzDNUL0FiRtf>Sh)hd3_oG9=rmPB z6Dn29QYw3I(#+GZ*d{LD2sEHa;_tqLf8f?UtmG^G}SIKI>Fp1x1tSW>aZn|B_R>6HBqD~0Sr?08lBes}(E@-B85!0tqZ zWn!{=KLv9dtXOr=pSO9yb-wa^j(Vs$J18UH!1sU~Pu;@u=23;=VXFCmF|$6kSKSeP zpC@#doKIBD|B(F2kT#tSdB(k3Q3)|vLe7TJo9yjj#*edO?YQOo`kULUy{u)*LF=$b zMXo(5L)jZOPLwz-+3F()Wy0$p7f!G`1lrI9)T8>FSKn{C8X>iHtRk_J2*c~~29=SmQ(YzydbP`Mj=BQbk#i})$3kT=^A6eN zKr}kbXy&c=CgQG2i>MeW-WctcYua}v*d%PnCUHg}BO9R>j2~7B1Uf)ba*iz8W_LkP{`kn4f}vqn9hdG)Nb0??o4ZE=+vXj84KJB& zo~LfIplM7h#QNT9;W1VhQIe?Bc|3{M`K}T&Rn|&ib@EsnciRI{cH{U{ji!dzw(;a} zOUYfV&BDLcozYIU2m)J@y3MnP^X#5*_NOa(g^zxa1@1oXcR*O}ej$}-U3?~x+mD)R z2ODZ6dhr76q&d?5C;FYPYN-2U1s{G(eV)sgWe+|seIO~SOFaF^R~#hN!ZQ+nD)*t# z?9FR`rI2hX+RN_Ss>>i=y^X5m0K>187I$<{&e9C@3d^aG43{~+$=^~g2-Ddl!z(!1 zB+!HhCn60U23lej7txbTj1tW`GIa3Hmys6v{<)TvYl}36+a&dF=RHLDe)p+53o&4+ z^7`j)SPWC$Z-?GkrNSj?I$cV*;jb!H>DbPPX5wWj53J^v0;CK4MA9x}UH%`GRpmz8 z`q02pv5FRL({xb)PM?kq<%v68r{8lfI{9vv4pI*+dbfEY{|D$jVURK?zz4BB%g+AI zTxc)?%`p-M%|;>`RLrOu$jVp-_q`YHI}8r$d#Q1mqA1to$I)j7MaKU}dELM+S@)&@ zH=aa)XKMK^UMEjC-Gu&KNlB-ZkAB|HKjKHK%>r!SA(v!FMs!@2lmuSM8j$awlUPHA z#8rvQ;gzerM;M+L71mNpEkO=7+KsEwATnWLhJ1nc6vjb*TI|rP!aStcHT`hLt5AC{ zSvFtu#fGmc9H5ou{rU2lkA{_5N%Bz@9MO|ZUtsWnI;h@)mKBjITKQ3OQ$s`iQ0K=U zqXua4YDa=5vgJN8t2vIzmCS!?9*OlQzKvSr$sJR{}OsDH#nQ znx2J)AHOcKV-ffU#0n2Z#WGd%D!U+=jN9V%rPrA%OFD212rTUV5{=Q}i%V3@a8I~` 
ze(CO!FHvg8>yO!|uX;!|u+v8zIOzt&AP+7oZ zj~S3I`wcdNvfU{%*|-Tc9v?e?O{Dc;>{8wCL8M{UYbc2whbDeAj} zu?1ffo)9fRhk17-HqR4g(zpH+2+F4WL=Np7g@)eQb=SSZ4y!z76gs1s(Q9rg}SqK>LHwQj{nfn^{cZBxN`E+NQt?2`K$d@rCYgKHd zkNh*3HIAc#13ap7q~&|?{SKQrhGI?F&P3m8_5O{~L3rikI@lbUBHW{lEfh%oe-fpF zaA}Z0NRr3_!PLrwEdEl%!e5< zt-teTWd@XWNeQ+hk<+QR==UgCQ@HT(NSte6S16VgJuO?Icj&+7jsW4mr-k3r-4WDX zpIBshAD|_isnbuRf?htkutQrUzPR(gp~u)*QqIllxKapfM_Ls~jgFchhlrA!!w+AW zHurnxgVW<4!T;gODjw2MV!U@(w3$jvi7{G_W6CIl`o^2Q9t6divn9S(&(x~ZgQ+Tq zL-E|v&pt_QcDHMPH9WO+$6RZrI+Ok{(Kdw|Rf?wOTti)cXcXLcxh4lZ+IL0x&cKQ! ze79rt@aB#Hdum(f*&u0sdPg#r8YlyQjx$G%}fvDL$afz!q1E+{gTR!2yXQ=z)J7^wuu#RalME{_z2 zPmGI$OA3r-8XB-wl&B_`8{D@eIC~g)$uF%18d_|xq!>471_B1dN^EK)60_d*fUf&k z^!GH;nRki>mDGPsY$}O(t^LYjGT@8R;Jl~BDVZqCU}}q2MKw6);3p~|Q1m%g`#IDo zbgx~lv+jDBVgfp_jTFK&-6}7GHRLqhMvYE3&T_RF@-hXtNRHpGll$VFYo>dY(}Xy$ z_I0FMMim}eX<)el8i9)VvPALhBP)Q zsZ81-0Mwe^nv~5b2r5LzZW0Dof77d}l$1G5OH1R__|E0%fqdT>urY%gXpyZ`_#J<& zTc+tn`sGdSnZw{=f2sia{re|!I$ziS@lMf?MjJ?H0|Khda$TJ;TI)j;!>;xB9|sEbi@Qx`L6( znr}ZXjX$GGsF4jU=7HKCH>G8&I`xn<3xJ+wxo)LJSJV1d!Fg=kOZOgE-M9!+lx7pg z6}5=>4S~q;i=s1N`~IJ?d3mPudbwP6xKH3Ti%(YUy0Op^ywGXM*`dorws-3jGu;Z^ z$dpU0G4xSj1g+w|59R8KKsf>0jI`DnYY2OdV^V2**g98F1eB+tfOHeNwMma|ml@c+ zjzO>&Zo(l8Rokw;q6~V>_Io?fMZOb^Ss0|q;%i3|Q2*K~^nTO9i^QvAJ(x9i`cUuS z;<+$^kngBS>=*ayi&U;L z9s7vxlox947*Z;ABLFJ&K+;ezo68qFw5NK=l9B@15)3Y4Lsy9&7h8k6s_cO}F7DGz z*B~=KYl*kTwTn{e;}{e;OGA^Z0A3wIZDQ7rTxvL%Pu`xxt$%q51QX=El%a{BO4J@`sEZ(vEAEF#dZoGKhx#QL;s0l zkZV2uJE@Fn zknzQ(J>3t2MbzXUU$Y943pi_sYa}je2PI4$c8TevzUtI5N6b}sZ5=}XRM32%D+#Vm zq0@vQMyQkrh~qPSQ~BawYZ_mrVzKjC54JfHU*hz4-^QPfCS*qfM&wq(hq%6}p&e!y zNAcS>cU42{)Mb>#9RXc}R=~OBdN2I&DbdG#&sC^^t|(&xHk|2#oZR%UG&zaJvr@{AOqebC{; zEB4q^C^VCYYAeMx#D8l2s0wWq-$p;@tNYgbbnVgP-HCQOkka$CL(iv=P{mOnsmE~j z)i*tndj=dQZ0E<8JlVnA`e&`^$x*18xUxQ97$W6EHsYf&+@YX2EBlk_iMn1bO^&r~ z@j9MjK}~d=LXd_r<)!Qqz)t!9RUvrNj(Lbej(pi!d>G`|B@>z`kN4Cryj>J_JZA7L zt_9rgI6uwXBNnSNk2Fu-5y;N#ZrbKqi_F~e=Yg}$dcDMUkDO)ys*W`vFx@&T;HPQj 
zN^CItM4=)qCUXiwUTej2o!(<*)+H_V#Q?q0>+jzOUz$H(UTL>^KB3v^7jEs=;X%*~ zYwy>_?EicU+1|7fUk3eI3-39Sc0KQn3T`luSCzttTm05BdU z#INx`t%oUTJb&au_+Lr$3bfu@N^$XOqZc_aQjB@do7}}qz_Z<`2R^dk}Zv+c<1*32WX(n(p>hKEL8z}C?wA+|&p zsk0=04}6C@i-*ifY&FAcUi&o!OWZ^Rk{uI?Aw7D-suSDvn=h(>k$$J?DF5>P3;o9` z1pkM~*^oy|zi~o>t=PdbF)$+yBOsT64JnvoY8D*~NA)lnds*ii`hdWnd?)!`sfSyU z08-|h>e!SVcJuwe+-=6m1u!Ybjx*1}MAo3%SL?i*C2d@qI$dZ7-hL&jvHrw+Vin@) zlnm#vnV^21SSJVhB~ubBONF4}wccI5|H6NAk=8w1GK3S`zx(<4ghNLNvkOyQs7rk zW42Ei#-5h(mfbyEx0I#<99gLLQ?|(p<<^Oo(X0e=?I~ahj%=an-;~N_{KeJpMgw6NV9MOR&4fOtD@$#~ z&s$272hX#ntPWm)-0zco1_VKub2EO^#|Iep;$Yp$Xzk0l}67+sl>9+IoY6Cl<@V}+^IH&U)9ao9^gjOtfaY3E1-sMNqFgnrgNLWr zW#-B?o!3t2_7L^=&J{cc-r`p$3xjN63cI`OX;uks0S zFG&@?K0m$uXYc+W&!Uw8wI;2S?Qx6mK5H$QO54m}bp~UBuM8=sk|@${UZE>zH?Z2$ zLwl6u2htVdTwj8NO^t@pOVT)RyF0%)^Kq|a>I!ZKCs>X^8)(mUT4VD zBGvutz6;}{5wBU)L4dn)D+B9VjUh%uWqOFvk@@!DO*!OSeg-G~e{Yzs0>1B-)uRUb zS)>g{QhTF6-1Gnu{0=y^7#m;?Gjw~ja>BuDfRuEF_d_veW$?fOoPD!B-g@o>qV}C# z-%79(tLY>BXpRt3;sAR8f}s%W^Yk1@L@}qkDJPBZw^090X*h~E*zo(?67VMEm-z zf%?ARC>7^@_hulGJbEbzb5JjJ9RR`szAq*E_mA^qAu;GDz%&OzW5u?gJi24PwlQsR zUwg)$RBj4RMb>1ov+%X8s;Iw1D8|jC$Ne#I!t}Z4{B@`};;nA9{mL4H? 
zVXvxJYSx*TH?+I!cb_bbFX-9Eu&$C8u%zXzNG3rq zQ0FZD+Egv6T!V=Rsp4C8xPbc=%5C3pPsdjx-mi3w%qlqP5lJelHM)S75@R@YOjgQs zM90hT`!(i4r&UH4@hz!DQsz*LPEt{)3}bj69L+v!mG`rdo=&6VBkHj0F6{0wWv5s1 zs6LqUGKDUDr7xnl^eZ;+u&YJ1cyJsQ0I%2^3O#PV`dFrHYvSPe1)9Zo9kiDoXM3(c;g)rip*)$oF0XBug2~$1Aou{1Vnby^N<|MY~EL)$3f^|w4i|CDEb%L8zo^Yf{ zz8)Vj9!&;lS;88J#B$6oUV4>R-!q=ns}HqT#64*;-8K9;HH1i-wjsd4_oHPyrLMpw zuhb&$&1MY>)SC$;kGCoK&aWbp%DCw-M8D{tbx#)RYvT1Iv3kBRX9?0&cR z4&33KfBK9up^S5J_=^7g{P@|&>kigV@7wyo9o}jhL2-oHvACWpizBN?P>h^ihPERNil89Y}x=x{r zE%Loc$l`wm%z`9sHwp(@Hk?uw29c)Vy;$WqdgHXKdnui?fVIq;O-nX8)+{Wd-_`eq z!yi^UJ>eR+sT>5Dbv~kf=BeD??e(m^D|TFuwO6=xr8*@(U2BWk(XMHtbYit*!|$8| zBp1=}^WSb%&r&ct`i1g_=Sc!)Pvw27c-r1nygC*x^&Q2{O&AuD1jMmAOJ7O{4}UV% znK?!&xE9KS5eEstCkPDz;nzS}X}si$Sf6(A5G9_nzBXGwHZh=RxaTta+~<=+xUsiuE$Wb0UUoI&;HMc0HtRQSmZIh` zM9xi;3j3FG`)#ZeYM#0)aG0_7zg$?u7q|!($a6Rg~GCqk0X_;7w zRZ<_$foP3?pPK2#lTNRiDO)UxJNkLMS+lJY5p75rf4a+jqurNbt^z>->S`=)6Fim` z*&K<&TJW37fsd0^OkEtGKv@@H%t~R2T~OcaqxtKf_x-9~@B>)QJUvEGmRrg}Qy}H4 z&gUBMWX=MBRRljU%ER^llddRMRbtEsC@vADG`HtBsw9?Vp17d8F=^-z#bXpgR|Vj1 zN}zA=n!2RxUjqXHP913xhJ=uO8a(a@BUqD@J?0lif*BR8+~R-**oZeLkVbje!s22{ z7``|zDAQ}uQj+P)UXDI;)x?KfA5$mQR~i#p0)?D>t=ex(Acg-!$)``$e^kytQ@-H4 z)Mi)zSVCrj#1`hl^wX46EQsoG5XPdvOO6vpwd7iuchudhs~4fzNY2fyS6AD&L;$}oLZa(QxN-NMn8VciR{Hmn6MzMtp58+*7GKGj}EXXJK|9J?|aes{G};^BiC zb%W_$pR1W^zp$XhVIo{R=-jWCr!ha_&XE7QOPfk%tT}tD@l@;7bm5e8(K0?!^3LRXJxn5@{9Gc3Do^d8`-mT-NIw zX|PP0&A<=~7`dCJ7@wZ zbk4Y!AQyQoYYN|;zrLYiHFntXhPanKtlhsHNym-eq0TRK`idAC8#5KW>EwUR`EILi zyYrSAe1;7UM=`}c6;gVF!d1>!AC{1JJP#|M6tN7@FiZ2;h1C|G3$k)o!D{ub86| zsM6gRb7Fhh!EKt8k$fJFgcj1*O%3Ppb63$iV;x_Hmb>>@3_^Wq*u9^kA=rY?Eq4}@ zm|4`jw?K2By8=_C>{)Ghr9-O6qrP<8^K>uYve9yU9b*{@>h*P!U386-_86^R8cC2O{t?|;%R2rXThkDd$EQ&;{ zk?b2mnsB*LitJJB0}vB4>Rd`&XhfvB2FkzQmc}|~{Giza8blYy(ASgi2KK3_prwkB zh|cuU-Pt4GlO)dOf~g}6f8HH}*2;>mOM@I<4vm(&9@E&G2+S~3x4XGmYm8C6beEVv z0I~078MB^Z=h}Q_2>{NYL~?Sp`{k(x!xDS7!a@F*T>@@Fx#vX(Xj{ 
zq&tW1?hd89<2>*8opaXW2aEZ|{FvF#zW04yOf0TU`q-`^GKHUryEeP%zvo+6^8WEzFC@%fYfFz7^Xzg7dT6A>7_5SZw6x?p1B+k-l0L zQWz{4Y*5^i&*_oyIJ|(au{%kR%y3E1A|l0X1(iG!xA3$Js)30b9g;;Yx|IhSk4(wz zRMv2%Bx^X6Woe<^5HJ%FH&g1ojw4{^4ujR#cxCD5ZO}nuKyIZyydm{gg{(IltiXc2 z8WE~d@m?JL@Du%omi7l3A5Xev`7#x;!_F<0$(tTBq-cdR{b-lEIG*iH3@5DVL>E$3 zLO`8tT86tIW-Z}_qozdQTbWS1y((Xs1YNjUaKR^EXY?{>Gbzb?(slcK_~Xt5dqMCm ztergIOx`ljb5YF1K)}4HL@N*k9WiP#@k^t>%E0n+*hkLlQ%AYAW5n3zqjgwATs~f) zrG&6WPLD93UPR%%C~P7b4zoMGekPg*CNF=tO6sjWzOmE1o{K#k2T^*e8vvH`&j&GV z$+F|tc3}0U*5{q z*vaYs71y#tY#&|3+}QC@n!=g%uKVRWuLa-JrekD=XlHlWKzK}3^j~X){{hg|>SF+Cv}IC$9rLO?pKenlH_6=)6r!#PQUXb_mE~(GBjEVTJRxlS1}k!+w!c8 z0*z;*PfMQ{!=$$@b~$6w#XmAVAphk!62{M!Bn0>`eWwP7@hiz-ZW6lQ;Va7pE!#@md)K>LBWyhxC#D6vJGA5u)yI1N+Xv zR6wR>1Vd&mXjPSa6q!6MEEY+E_`KVm*PDTVR+6;<1O~wT=Z3-8fJ;Gu+}vovW-I~a z%aqY%E&TNcEhgpXn~4sY5gdN0s|)F0l^&*8~8fTZkDhS?NkGtfQc0er1Iy zGt&;el`NX@E>(Oz?H+;`Hxs+)zj@O!kI2RfQO+qvkw1@@Xa0bqh=R%TVaFVa0F}S|f;`1aj5WxaV3=%t zP!WD|;tx}jaJIdmDC>OM7yhCtUWGdM-CPP@ff`2-e4Lru=6&4F+}7;V476<`L5gn- z<4$3j8>!xYN_l|n)ek$5x7j3TomoCDBp;l53^vxr9nv2))ttFSjl?hh$9Z^uQTKhi z(g=yIp(y|-@-Ea=7Rd2-f{j=JRt6_N=M+Qw_>b=%(?v?8{0`X?X4+t#90+T)^=w(O z10R3_+q<~BnpuMz_&wE*n$|Aw3Jw1oejKQ=V2IwP@RNHQX0HlPRQ*ew$ljw{s$FYk zwKpvM?ce05J=(|J=OSgZRWob9MIjYD88!BY$=!5FTHRW3jvwn7=LQQ=(@XeZ>e%&U zzKoTlu=Okpu!MA9%@#-cnaFw=)r~N)b9HP@qKh3fqS2C4{-k- z(H*aedd~CUG#d^F5N6qSD@}hFYngjv$P4|r#hC5DjToK=ZQjKfk4hWM-uAd%7gvJ` zjALjQQ%z}ph8?1W>(;|_nULF?5U-sRl|Ty#V{nrIwP^do;7fT|+$Yt05$|iV=Z$*U z(KNm6k}CpIGNoiaJ9gehizF#*_uVK;A9%f%SX))p0yxOT@1A{mESE@-G4N>|6O4El|SAOjWswv(F!TpbVmqd zI+#p-D@&P1weIEX%VhU7yxA1d_*7TnjY+e zDP{B!?>OJ;&FNa65uMje+=m9#2+9+ep-`KB`i$;%y zF&wrIGTs6eXlB@!!2a$vQ|5R*c7gXSvI<&#M}q>np=O*FF!F&yzm-RieMnSh-hZnH zHgqU-x~;Kwe0`{jkay4Bahh4nQ3dsgKGs4}5lz_pkU`VN(MCFhUhS#gQ@`W1X_WN( z7M=@P)#v0wovxKiW&rrHDgJX9E5DDYINLw!U)srsII>+@q!9qNab;^OiEV5wr-7T) zGz%kyURyJK(lN0m_1f{@kp$Ya4)KlWInLcef2SeIyA;Q(@9KvWbgWYIEcKzRD`n`O 
z-0Xp?tTNfw)y5WTj5ujC-#$hyHcqstQ*l@9hyT|>{io{q&r{}vD*DN+Ybt#Ypjm_9lH+URD@MAmtlSDqzo6*S_ej<7T=CDmNT7|(u< zd}1e!vREOa5<2@kD+jP1hCpOuh~xxkzfuj06XE;34&dOoq!Qb3_tmSH%da2BRaw}y zsN{D_XxgI(3}9sE8WjA1!@V?6!6A(P9!Yh%XgSGg(~_c&ix?@e@p@9 z8RuKcf;b{n>)6e2*(rNu!SFX2k@VYM9zY%RJ4Y#{2*bOi{*;wf3iE*ypi3Wl_e-!4 zF!cFg3yhf-R`Hv&c@eUFCzm5v>)xwI-I|_bUO#D}6t8H;Wx?C7Lkg%^!u#8%Lf7Gg z_PA2tIW}TdE~|Dx{Dw}v;cNb>08%thePsp#p^8zN`m}k>z2*g3AsiJ;!ni)6bc2EC z0@*rKzr-tFZU)YYme~8@MIdln?Y!Fhy^un=;<|MIe(76La#MYS3Kuhi<2m$kaTzUQc5YBGj$ogxPdo+ zDR|pA7*0}D!|Ra|vk5p832KUYlcte-Yv2s;3Io%|qP-ZA=R8=PorGov+f;Fzk3-{z z*xdEzvF(#7L9I{kymyulgf6(q;jlP;I>Q2555eV8o8F4bI>Xa=A5e7jh-h$Q`t{c3 z*OIvUzrv@PcTJ>*8S+NDKpXT0aNU-6ZBn02DO6%Xd>OBB*v}5{kVbYq`Hn%E%Q8LH z0OgEjhe2PcC8QqM%}u2P6eojrUrC?^o8YCU!@?YTcczNLBxANqhwDw+Tr7~pddWypQRi{&=Q|9;15s>fY|0?_|Qof zBNkxssI_ONHQ##9e?p4N|CKZuJY*$FNtNq5E5!IEHsZAsseBbB0btTRGffQ=Iv;*`o?XLuF_^%1t$ijz~yR4`wWfiRhIuL@JYJM|3@|>!(mJuzV&tF#W z3e+u1sR-LF3dB~WTI2Cvq;$*=cishg|HAw0B_8WxH>>!i%pvX7!mBN2@yv$7he?m< zKX+iFNfJ{(k}*0+xFh!S^WOTNA5}8D7tLGVk2$OX4udPCzu&>Fim+g4vXruQBGIu%3Lv5yY7B&83dtk&VFY&(r`TYWj7%YIk>f{T;DtxTe6%om z$EIK1u^FWy*S480bDAAFA9fdrL`3@VrQrdsYi7=XR;<$N7Nt`z5e{6)5rMect6=#I zEOm%beREAfaDg6*1>wDmXf?sIr4uUHl7SI|<-0I`h!r!_csF>qhA^ z;xl(|Xzu!zG;UV`3SIdv$6BK1G7NRD@Q5h)PpJj&1PR{h(7>iqv4J(7Wn(0L5vRNg ztFs&;G$Kme_dHF%j{v0tweF8RP83nEwqYKkOh^H06B)X-#LU2?T^C}wANApw!0)=Q zDQ>28-ujw#JQp+2ETV4$7&P0*0&kA5PF4bCx0l;?V@uzB#4eG{+oBgAs z@`DM_48u&11cA8|+uwfFsyxX|N{CqGydMEI6NlyGyE5HI<-K+5L7~BR4Z|r~L2hnB zLD$1^Q8FgAl4&&Ob5RQPf5kQo-x=KNyIhY?F8@sdy3U8-vF{KOx8vm!hQjj&^=Es+ zwtBzEJB@dRZ9R8UjvDYVcZt|Z!p{obLuRU#XMFNVXm3L9ievTiB1}j&6Q(iP*u%59= z-Qs;HZ<>Mk_C+OxJ(sn<@qgmi!53|g4t`Q|Y?j_OqC5wSC+@PW1tjvqy1q8{2jhdi z6%N0Twz9{1*v>%k8o0sw+b-M#Y2WDPam>N{fTe*#6Jaio+`({(;!t0NxbTK0;3h&3 z-SL?Wf$^r}+U2|F2JD8ZKEYbEHyjQz2I|jdKCL<9_CHK z%R&(eS80M?SM6$Y7gQhm5YCQ~2b(4=ZuOu4IDNDee$>5Qf3R0KEIc$?`4f@CUxFEW zWgYRhH63cPrcTP&!dMXf^jhWfO=oZpC5MqBv+QHpo8Fu9I1C?hL*w>d(uH!e&u*2W 
z7(V;Pn}t^p7e!}T>RD6*G`{W>g@gZLr_DFA+}FMLf3}S>B%SH~H3T&*6vO_IPE`jU)?*XL*$2X*4+jquR$sOl`h4iAUPVn zCUwE2`vW0R0Fhqh%d4{A;y+B9Z*wK&t)!@^a=!ppbJ7+qHYvSH^pY^g2^s>Y7R!GS zo8PQFwk63aEpY2(TvD9g-ftc5N0C2nojNt6(}bvw9&p5eVFOvLsrgU4W#W(nUa8fR zm~~Yc+qqg<3_cH8wsJQy(_FSd>#r_Xk&E~flWX(!BvXq!!g=&lIDrlutB zSYf=guG%6*5tTIMybG>l*Fq2&QvG}kn)%VLjI~W?lEg%FL0F-#kvKFQqZ>K%0q&B8o_A6et}iHqcOji2Nl3yaD++j?>Oi zly2iihlmuLNdcK=n}PXl@a!aYapdjIpfNLxx*F_q==(nc?+k$`aKw}$ELdC7d?<8y zSi{CXj-t^C%u?sp&$g%aeMW@q(8)wiGefkUfNZ)ncwq{4{h|B3^;D?y#1)zUMC|*4 zJaY31=LU>CfU5bOt8~8$4pPjMUOjEcr!K9{F5cU$HPA^lsu-E+3306*gTHCf4wqW5 zdCZ@bb~cF()u0Tpolr120T5cTXsZ zE(Un&Uqs+u^JN}^KJa*JV~{0r1gMB0qUL_lial^L`I z=`sGga&#e?Qtu%upEYl!BQ< z*xB_AU5n2XVWojk@78qlmnOVWFAR;dzjiE{1# zetYtW6=C|*%W&8R!1ih*APHDH+xQaR6nq;Vl@`bQ@AEqu>SRrgEsW`{X+0o zQI(99FeQ<+%O53D0~{Rn8O}JK7Z_lTp^LUMeN%Wt^@e4x1t4Ik6ICL#=w9TC=9F_4 zahk;%AJo=R^n&6>NNckgEb4|k9aZ0hK=ZT-*LDEFGd_oD?lF>z@af)1sYxF<<2ALzyR++ zNz@yl`=T=qMA{Jj4|v_~7!;#N1YFHDZ?;5X zjwf}r|EJZO- zmjV(d|0ESJAEaU`>F21mw)TYmKCX@e!e4;vLReK@5}#SC|VA9Ki)d3SxkY`K|3ia zq$s@#nL>AB`LaZuYIVrFHK_$h7o9<9YQJRQTo|Xp>Ajk8ry6lV)~AFpB1&xd zY@u^^!}cijaX!uc1f%UDqNnC)^zRvb?mv?ND%%THLVZSGOY5=bw{grR1p`%F*)KM% zX@0k(bi)frk+WRa%$oY%{p9sjLMo(>Hy~$FyZZ%Q+f9tb7<;?A%s~!wT=#_)t*?AN zwv4CYvRSVNsBmG9`EnNxcS-%393;vp>Wxkzh5MnEm>n~jc5#speapIgV{+n6dVT|t}F z6>>E7Sa`sd3d{@cP62Gd3a?p1=G>NWk6rLLM9gt_orx#rTJXp=v6HzTPOnB$_-kxM zT+34Z%ZGZ`J7(Fd9O2w7X&+v%+382Ts-HT&5J7XBBjESh#8qec6*-Yr+p)Lh1%22T zm~XM6tz&-^M)Q;9XB7Xo8a!XBm?kly0cJwx|gy5$5$1p@9Of{;;_@?i{4+OqPwr_snU<(_+yV5O+HVRfLh-d9N@Iv{q|%%i)#*R@rk1?{Sy;(0P8zy3Rfy=c zl*nmIX*Sxj!`=F9_^$@DU=zm1&7`#t{RvJ)NM!bjMy=(_Rok*5<*5SmmQzJbdYNtZw>9?0 zE`p+NMv|P1-Q>G9())~FdSUbRjzX?a0hCvDJKa^ba0fLq;AU=W^X+_1u(+X3HpfBC zkcFTy@S9QKqHeNYcDkQ4%ic zbP6+}{ThvIkuw(IAS|6+>8U!Vsce+UX`5h)8S-qgnWnT#evMRczg7u81pu=>o=n_O z{ezexttyroPxM1)e;H2CGDk^jny+wY)o%bR_$-o|XSlP-BrRwaVXR7~Q6AEzRwCm? 
zL|CMWfx>bZiTRT?EbyGNTr736Ung?k7!bN_b~!Ma7y_g<@2a5#WWep@;4p~tkRk9r zq(h@k5+AwLlOKy%M`AV3W=IUv>Q1D$Eo~`9AhudLF@y3AX=?S`yiaNwYHeE!0`uK$ zVZw*>f0m|_J`nsdc|l${n28)KNHYKJ_<$>SVqk@WRc^q!UZ@1=WsZwvLkC9{JdnQgBsr zY`|O|A85X*VcLLWc2Xf>uw9N>SnwpO?1jj@)J9X6<0{w2jdq@T(wu)4Am1^l*E=t} z=^C?TZ!uv*gtB}16vbYzb8S);3Arv{DeO^x7Cxfg;;HMuSd8=ub8AEE{g0;B+sf}T z%#?w}vUgB}()mEVAz!(Mu+^T}t$9KGa!vr8xsi#sc!bvDvuCdLO-;XR`6Acb?S;|c z$Bd6-Z_xLY{=?lNYJU}3NLpHKe}%urBQ4NP^-UqdmPyOI*KK~7aohK$jPKpAZ>chc zT7r@lFaoJJMxy$xjDE4qovcO7v~f+_FOkS~M|JxP`Lom>#SNY0f z^t1i5>9CJJ?U%aHOU6g6^VaM>by6FrH{|qOds8iafA}Q!ZaOrr^!oIS9JAT?HeE`) zQX-ArLVA1S7Y{GaKP_D3U!*n9i~0|4I-gz%;@I0SNLKOLF$U-Fu*grez{ZER8N5oTV)o2c_4kbnUs)(9;52xUhQR9U(Ykil)`EWaaL?bTe4hc)v-+PS z0i+|8RO(PA5-xOx{G)(?KY*dciLRP`WA{PCj15sPlWY5f^_z>}>v_)=F>RD-3g{bi zv9e&7xPM(B4K-Wlr}76_mk|(fFHTlK=0F9Fj4Oo!RSf9HjU<5S%>oM@D6P8K9(OJ? z206@=QAbxF8|?CSq!=9!3DF%(9vjR-E((SH4lX92zN80V2~5)Jc01VxIy;p$l2AFz z>tO$HZkOn%_xAEpM|~j-hg$vN??^h|%X`2gpeSKh63#V1Vu&agsJ?q^J5!RVt)?T+ z{qQJkYIk21I1z|~xqk~*EBWhPGsy7wc&0RY(SV>tTWt`z@nE*RyF#y7ENTy+v=!4| zC$Z{G)22Bj?d!}A!-qWq4Z)(@Xodi(fYa|qE9Vo*(AS+;2Xm|n$K4W4awzOXovN4k zEGi4bXMm7%Ztq;`=BJ3eU(I*{oGYL*a}VBF4OAayKENPBIC^7dB|hT6JG9)umn zSqU@e@bU1|qsMz5(o|ABBBuO;X>H8(w!;@gvb8K#N_$Zsj8+l~c+ZGA?4*lmMYJ4dyVITn>~$YA;;Fm7 z`yF<~(X|E6A zjlevW84tC&37O5m!MlEpR9U!lm60wHr~f|L9YZs`iC5yUawUtkeLxg1yr#R4C!uBc zpyli#-_U5qbDDTVMS|URH!lwap?FJka(zie1Xa#kx-2OIq!&_`T>&#$TaPzhjvLE! 
zx7X=<{55V!Nqz!U1gQ0$(HoSoZ@V-s>9aUX#4+6wt=8+FHhiNL`f(#sVbF?0$JuqW zl-YI>;+TKISJi0Hfm^nDPJ2kKt${z+noU=ZkM${hSt&)th{jBTDilMq$#}T1oa0&r z9H9f%^^@4#P2v4bS%765boNAU{Bdlbbh#KY^jk^rv#m66o`wjFZ$iD-6;T*Z=+Qds z7Z}1Y!_R)hzowC(_j8}xui^hGmHJ?R*0%9F`Q(&i*CfT!Ds3A26^fa-w)Y|bS5LK|b?wG_@a zbD;v``Qq3iBP;1UQgdc^2?V9t za;>_56-9BHc)|XwllEcR>bx5ew?PDBGstHVAuYF*qYH4^ zl#dB4rB z2lPh)BAWz?QL?Bg1?bB*Ulo74ioD5KBGLjS#dozfbJL$EM$|q%y6}^k9)>dFY5ZV# z33&9k^fu6BnCkyxn34K}D9dD&_QEvmmoNg1^3Q^))UJ)z_xIKe{~jWyvuL9980Po- z!D|;zx#F!(DV!B(*eA~tYOTH`yvz`w;f8*Ay&CyJF@OV#_PCr6JQ!mnBJuAE8J#iQ zW-K6|Orh3Hepu{7j4GbzDOssKcP}Ou#JKWAmobfvg%hhMit-h_zDSR1xr{j(B)L#q>hk@ zn469{$!C{#q=@4(=lIa8b~5K|k(y(~)c3dABtbe7At^qsrc~fatg`$MDR-GWkAbNM zlJ|II7pW;vXCC8$OY^o5gDwV`Oa*`D$9&ikg{}vxSJ9~F^h(&S9T(nPJK;-+1Ai02H=iBO&W2HgCUae+QJ_$*|d#&Ju{c`PV16 z0GRj2!(CrJ05>>9&=>LxpO&g8QWWxXM+1m!n@Y-Mvu>Leo9f@t?fUeSwyeKIVyV>v zx1Or+L>1&_-A~tvj};r!Gm2jE^Xw33F1N=d1900-0JtW~2=R?>=42#lQ=r1tUrvDC zoSqLzw)oz?R>%}lrOEgvh>c-I?L%3=N$c?9ig>)z8jy}L=Xmp(n~Gtr$NA00rX^Tk z*e~+oef^JbeAs-ObPgK#%L!*SF!`SXMci$8@Yb*mvtiTa7#tP&qquH~N2bD7kn>9+8O!?1qlwzR2~@!Pb=&9*dD zFQ3(*ts(u-mfmoM*jA!vKq^p`A|B$XJ58jEgEa}86>86UtDgjWEae#EHna(8QV)UT zR7&Wno#=m0`#y{4X)0P1&(M*-KX;iYo$3)QR{@2}97WS$WKJCePS{553x@Q@X?&f% z%|##~rEyJMr|2C`Uf(vJ`(+7?3#JZQ^BtbD)we^+3V;;A6zrrd% z8F>qG$cl61R0VD|qvBh$o{&Qo0DJ~8kZIc%?+BYtcO$pwH2Bax3X3rinrQ;<3C_4F z;E(VYGfNp&MOom~Q_9xR85w_*4v&Z+WN70DdW-QiJQzN#b4cKgv5L@nirG@MK~4%_ zK)Q_iUT7NbH~^0)5%GgIQO9$yUE1svJ~I5X-eJ%MmA=FE<#R{u;`V#&n(61H?3EgX z-kRUnyFA4US=pLKu)<_S<3M0L&cNR$#%yJd6payU`Soaj16#w5uVFlQaluNMwezvd z>aFd_*9>f8W{59NB#$UZK$WQ-Qp_(kIbglUx~>^`n!0a{qu_?ptFM3D;Chw&>=+^O zC1%OJ?7NB;q71XUkBwDG3>hkaK}d#P%nJGo>)KaVjQD2MhDD(GG>-hq{svrTGPFGRZ$m5AxlUde1 z1%D-=geV9dmOFi*B2I^RaR3j{lzDUcFI<$8{5ByglNv#NyBW@;4lG+&cbcFp#rby@>h5EmZ_)sk3@#Q#bSJV%)rLl}0&h6UPpH`fz>suq}Xi4Ka2 z-*>-1=LtWzuKTg!RuSOqL;=0`w&23TLJ8P3q$ouR415g$u*TC z)3`i<+L(V}P&bBJRCd zK#J4Xot|i0c@HEne!q=Z`Sp(@z zDNdP&`fk-kPF9B=@pwrF!bN!gK;@I7wRiv#t$%1UjrINqn{dm8~CL*k6R 
zxJFZ}6D`fgc1%2FZKXe5oL#jE@V98J8X)&Rm~D}lM7xc2be0o18eV)nT;N9#oQq8) z4WhLETv#0GRz?97;QPo)a5&XxhKhlkwGkP)*ezQC`|&V1G2aw;2MdF4O&ZNt*&rve zlEq9Ugyy?v9W5irigUFe z&E7o!c|Jp!=RV@?LMl`#QQm3pRcfm3n#FDnGN~4GMydnX{pY_*E#M#~?KYvvpUgoL zrqZ+q5ylQIy6CQV=Ur^}RP$U;f^T&yDEc=I<5;d;`!jejIn8RiHgS#$%X1>P;#dWu zxO@U%r*hFjXUlHi>9`_ASNBt-VuV!FHtV_8W{&=tLeQ!<|5U(|dgizr&AAKp&Y;he z*s{6S`py`Kv_A_DDc)+{H~=J`7nh%FKVNHv02I{v^xIdYC$rgu^9qYk01VzX`Llmi zM%Ef)78f~ikE3{HrM8AYJ@^>hSwbt)2H;rLJGU;uil(yGR*MQi-Bb)HoYwvZgwHu= zpWU-~K?dOX75U15hkv#Bz*CSnif$~clEtB~H+_q|wm&QyYJqJcVSXHvov1!)Uf1#P zbJ)#_%UglKi5$1gbw>3ilTQ&qZbm64IRIJc!vF;+1H-zQlh-2}Lk}ZFpdAsqXA82k zBriV&Zt+}3%S*5e;wv{`6*k>u4BiK}u<~V~nMw?c05ngSL5*1w>4E#9&gPsnk<*SdcSHY9r!z~LBzMF+@AaX!jgys{e*8gP12mqkDB z4FUHsv=o%w-bJYjB{nKk{igZ1s`qX=I{8F9C;WAL;5f4?G1`{rJbem9Z(Dfo40llE z26G#F>vnmg#~W1&0$c(@g6q%__W>6LHC%(lw0)>P)OYve0zu-q+9P@+p5u+{y`1T1OJtzzslnzcQy9a?gYC zN<-_4yMXOcBHRos&;rpUmX786!ZkE2d0L+Y_pg+NjS|BH0WSF`7^3+bnGjg-r-eRx zApfUA2!@O7t<#GeQs6pJJ*z4VwGv#DVOn{xp6K=omzJd1mhi`l$Jea2ngZb@DY?s; z9wvo)B9Kl}6pEW58iceNB|Y}yHN3aZM}S6!@Kh2IGxq4u2$WetOXBR`ecbeveOK}E zBh4w;=!~Z}=Nbd}h`@D6LGBz9#ccy-w=*2&M3{zQ)h?T>xjeM}6($7xOLeyVke;mO z;Xf>M9CqC*F`#rX*6h{!siL^N=JM+Fg3oo1SaaBUmcg7u95&tNdun0;R81|eL(`K< zmW6<$HU+@1u9?_-%zt8hSoB7fguIAl;~3SvB!*Qn`TGsh!@n0oWH^cJmo?{qhOn9V z`@mi=OshcSP|TTOp*l^auwdl=_8U@^a1b8Rhe1beT;((kvgv7nI>v9cqcK+XH>P|*PWjz-wOP_!V%q6{)XSS zd{Uh4+CR88@4<_#R8LhBpDcD~@1wh!uVKHo(FNzf)IXIYgRFY)(`L3mKlq&*UY`;y zvoRX3|AF3T{T@y@BzGkceR|A#>G$`#+nFn?b!LulUL*DK!k#_f7Qj8awVOk9&!9B{hUKv> zAdtj%OP1t+8u4_eSc)9--E(-nwPP{(04gPp>DgQesYg!uUzbJY7Rb@(3LzR~qMS;}-X%PvlNZ0(ee;lG|< z(z$wydpB@oC+qo^;lQE$clscN-c*sUf-dSjw=0|fnxIYGd7oe`@YfX&`&vLH1Jh!bn0c+T>Z1rzDJ`>IlwWYCaa_70F(Qj#vXZcB*cs4b? 
zQ2O-%HtSd4Zx-v}*H_NctOvYo{&pjnmfvhL6m~}g7}Tos9Cr>aqz!eas3`XZiYc$M zUGpKoTHV;%at7z|rLG}iq&=aG&LbyS2S$FVj>Q8KH3%8Z=( zp+M^wV#TByG2Rf`wfP8&Da{dsgE{?(2+NhR=(itCBgkeHTL!%hbsJCm@mDX(w1>f8-jD*yAjbZA#71HYYp{8Wztpo+CzC(=o3#3Eb03DGlSn5gBn?Lby6*GSHr3ZJki?& zUmqY->t};NQ`Gp5wm2QL-SjRGHx*zWmDMv>xh4n~qV?+SQ9?jl_5ctj zlCR%doakB``S^L$4Ftu6JX?dHyX?L<*Yx@#jd6eslS#Ifzz^#^UmhXhT024BD>e~M zQkW3}Fh6}ZzBGNOkJI^U-rC)XU8zsRE8{n4BMg|jTqThIW6y?tjj+;t+FoUC@%&75 ziX`;4F=TE``Rsf~<=sk5a_wt~@3$A;%++@0IjXI@i#~VR^ncS24UEuAsgNRDXWePF zz4!lW5+s#IID2+=i$v!9| zf$PDFj;G=6@RUZKYr5KB+V;=CO0Yuj-62n;aY=A7nVH;o?~pmkB2L~8e%?)dyB=K9 zFxuyPdNrDAoX$V7jXi+>wZ!!oj{*l#2kNb|7K^j?bhAc~o~bQTk_#(_1QaRR6uTsP zXTC;@B@ao@R0K!&+)WZP)9I@U(J}lpYHwJh#wr_SiD-lf^`n-6lH9uM`d99u)E4e$ zfC`Cl(|tm|EX^1yETjwN;o|7F{SBlU(ekDeX{*Tv+8+ot1xa;cZ?Z_Kt&|A3aQ~T&=J{=w0%_oC{S_4Y)hBA4kBzV;bx*1&Yj}8kgyqJL z2`rl*sYw5`l>%)V-f1$s#m|Wx&aVlG(EVJwGz|oe!d7h?g zKy#TSIRLXMiLnfjGd!eTFLzi4QO_{kPhg>xfrN<&*OWEWsA$QnEW1EdSnt{_cgNjv z=!o7i_iQ&tPgLoei%Q4}@QJ?yZ?R@4%n^l3D_TA>(aSFa^-_#dwd(7HGj+{U=ie$o zAsnV{%naFWLd8$_T6d|oKcEtAK$}g1oT|qXp|_*;L~~n|9;7|g9LHQwlo;+l?Ptn8 zAwU=mXC42|)elQ54tv3B-sHn-@)wK<7%oz5Ig_jhoVmStZ>)|rc9ZUgt?B2sZ&Ux@ zJ=y%=3FzV$kTI!=NH)CxDB`po4q#w}Of&TZRToWE;DX=&D%K7hkLLvK8Uvls9QK*I z8k1)!mZ}&1-)KWMKg=L69M)a7yM+IFqo$S>JPl=MxVNYgR#m0A_&86(KsF`0VOZp$ zsa)nIpfsM7Uk<;%-^K0T`czQ|@aI&QKU!mo9w_3;nx4jqKAi`ANXbt0h_XCcwm+-? 
zTmP4d*PFViDh0eMdchIlW1-};eA2WlY{xNE0>SQ@O7_z+8g6yOsXrF&UOYh@w>%dA zmg$CG8h5{S+}&=+G2+-ljcB@L-uNQ1QTH6QtwToF{@R+He~5OkN<**+nl_tb zoE>)iKS#(loF4Id`x(;5Qfq|~Ou8P=yEbGjV#PTdYz*^}uQTk4w-q{`c7R#ZmT*c* zUU#3TN>({9*myU)(Oui>#Fkr&_Vub&!!9qBG|zS{SkicKRl;h!dY-X3%q?`vQa-$r zDe#SK%oBt@r%-8f5BP6gVfGnq)I5{U)KdWVn~-8j!9ho^*ZUvF!|5*u{;xb9=;=P!@BUAIy3C;>#IY+bv_ewUV`PahbpQ}QrtwJKE@wr;U zNjM7}sWV$6nXWA`-c*OQIFtCq<0R2O`w7zIWh#59MY+|K*w>>^#NlGd6HTRwpx3l3 zeuSE;*rbd4ljRq}4A?KwHO6_sXHWd3u5rAy9I2^xG^F!fM5nb%6c~tt?60K{)1&My zUYCbPh;&%nS`mcY}0?T7_It;&HUo3LGykT1T$&N8+m+&}4 zQS@P~glVlVz9L3x#vyFei=ya#X9<&WJ35ATWfTH%OeqDB&q**gF(YZQGksMn-O&Nf@7rad!&Tsiwbl!c!UL$^A#P`jeY zw~?I5s514vif~QX+Z2{EO0R-H#p?O*(YM+4krI$E`$KQ05`@3eM~T;q%+prpQ1WQv zv#sr<*WO8P^Da`$m@Az<=#aCGXQNYFjm2Rt)W5ax7Pb67m_3HGQ0!gT5EXcHG{`Y` zbW}eqqNk;jt-fiYWI3EmCYSH~{hnp6$1h)ocdHetL%#q5N{Pn&<(0^o98lw- z`jsKk@NrSa#38|?A_hIWHDQRh<6|gfT!qX2HLQkFx9@n{KAK;jK#JhKM-b4m(uqe{*pQ==_?|2#|!zX9@kOfALN}ggopRkghNHwkJ}Kt zPTDt4?WUnx1P`rvv!^=z5TXyfQ4p3wGM+@#rERKC^lU4fFsmC6@djjn94B~E@x$64 z*J>RFW5STHgpD8jG{@t%{@rP2)RMTtqo2~jTF?_Hg@V~Z0=3Gy$ z-<{xBK#injEb+{1{?T%X;i0S3eogH7y2Pz}F2pP?eLfZCbGF@U^lmL);UXRkjGZVF zzq`=z3*%4xd;*5OfaM9IuPGTEK&_f5(PI?0cIWzYmCms>o}_%#?U7KGN;2v4omfy3 z+a>*5kpm3#w;XiO_x#JPWJPYQ=OW($5=L*QsPFSO{%TiHs9di$o3!Z!g2)uLXQ^Ww{slBBGM#yguc?+(9lqZL^r8*;kyd{MvBdy&XfRMW8hMr z!oIQ%EW615FZOg9OX0#v3CF_WWD{6nN^ve#^#I!PU@6yRxE4vn?A=5UIFUWZ0x^Xy z36CWDtn34lNyEGvpkned{i?6V8}NmrFL7On<^ydFU|t#)rsb68riD~Cs}u7w0*5E! 
z9e+a3k#Q&ZK8!jb4cDw{k(6o@A7+Q1eW!T`T*^#$A;H`M#x5%SuMl;_0Kkc5iDKWd zE;m|c3z<(*w9rL{hPUr=;Jq?Y3-PlH(+@r;6tvOEy= zt7)ctlp6JQzGl>LE)jax=j@fR^V1s#IYWmgdT)EQZ$~~XB|c_PCwZLcoqTCr$U`PQ z1h>k&+LzlEJbgIMV!thXVt@GEneo6)qXR1yPsH6y41rvCjdlVhwtPr&s)QY$F?^N( zp=mrrAkS{g(3*?r$;r-swzE20!|2#a3G1@;*~gyF_|jQC zA1S=M@xMPn4cH!9|F0BjC%6dM+)aFK+l}wfvN1iL9tTSn^WLzN$K=FDnL=A; z=WqX+1~fZK%mzxlB6!dlXmPgUE;6AUB~zoGYSZb-{Cx~@S?~p}32{&bN55P)i@H9Z zp@5bfNFSQV;gHS|*Ap&n#|O8Ab^UDdckr4u^GjH3I&T51&KN})_9fhOFSB(Xa;{Wm zEJCEzO&Ue-5oNJ|L(Wh0&deP!;~H(Tr}0gG$iG}%+la%6u+PJXZ$Pp7(bH#a!~ zi<6if8%AiyGFZIthW(wb!n^0=Oz6$AH}%acAzf{zhXu`2S)lytoy^7NE(!KfCR-tv@VyQJ486PUAw=Iil3`ai70^KcekeBm6HhuqNpzHjLM7K%9J92 zyA4z4(?QsZ$ql@Vs2}p#TaYb%62l~palIyfVlm~1_>ZY&X!5-dv}&Tl{P;=$EZ#`T z;K>_V$IlNx73*Z#gt3ARt@?7qs10gtR?)XpDcgRTM>WB zEahyJ7$St)Hssa}SZ}6#sE!Y;=Q{eF7H4D+H$46j!YCtEr6Y|{!LV1_zQ6DSmvw^0oF8)w>+!r)$5F z8R>8T*gFW9aPk;z7>|-(i8Zr)Q3O9(nSd6ADb|iMz7##|Zv1`ryZb!h2wE1VL zW-M_&%FtDI0szpD6mN70FQ>o0(WaXUITt{N^#hlvXpRQ-y#R|8&5qh^8un(?ti$>o zZ8TFHB#v?|JJv+0)}%-A!+ueI1`4HUl3yi{PiioyBrKc7+u7;RAZRu2Mpm3nE?dHq zs}OwOg(0tJ(6iX&L6|KaKW9%E$T@92|zN2pFlXUmlCg zSU50BWstvBN=XQ9kHtGC2q!H_i1JuXM2;@h(Be`-Pk(Hvkfp$BNNU(Qg2jb zj-z6cbZ8paNU7|`^dOp=1^0PXMnZ}h*cg5;7f8<^d^YFj2xul81@d!`Z)vz;>Ed;V z`so`GZy5koR+zV!Du)YKfam&>TVqvg)vdT`S*0DxxfsADi#jlM;mXMjt{c7?_#31m zo4LyS`QvHwmK?5)UJ$fYu*ZNsSFns8K***Fql1OJ^TIi$Pwgu5i<`|&3ndov!D?zf)UW__(ex6Rdt0` zM{S2dg}X#MK8L$YJFYWyoqj*$90qvAJC~_xpP9Qj-Z1L+yX=9M4}Kp_u*L;HU6YJ4 zKmH(|1V|2ov@{^X;1=4q9djtn!cNzV0tL zxe_g3qL`cASS1i}Pv2Fzr+H78-#Tx$jy`QI0W7b3GFx#Ker~#d-q}?o(bI(cj23yt ztMDl}?zejybX+ud`tK(Lbb!oUlbyVp#bTEUP<}@*0W^qUL41I|KTw&bs;!sp`-I_a z*d3Ggd(Ks%hEd)m<)JkU)yMqo&KN=CWU@Y!7z%8hJl?@f6gu7;24I<+7vWP9M5G^X zkgIQ`;S&*u4OWJJjYpFz09 zZqE|r_bGy>zzzHrw@I*oiqt$XK}!H*WFrwIjB2`K%cf9IE3#&iF8Lu^@9 z1{iPoZf+$Lr84L^^jCv1;@DZX-XX@u^Yc5E7xCMi=TKZwby{fwycwAF;w$4mb5pnJd2zH55l>=v0m0S>K0j@oA$Cmo4P%c#7oK#uQR-`Zl|gBgvw zPKQbdeNc@1ei~wMk#jcrCbS5dqx;7tCpztEKhDRoEi65$KRs#)3Sm}=JVfM6G~Zpd 
z6P?I}hV*-Rddm?^D%nFz`m|<|ayp!i{NuVAAu4n5pf>$RO!GSi&w`}abn zqt`Nfd=7(nIj{-K6~Z}A^D_GfA=YqhHI6_}90$Wr#!FJ$k<*1JGWf2aZ~}6pm&8MB zb>n4QQ;Nh9<5Ty$#~t8g!NJi%*(gd}A=Urpa{e%Y*&o_EHg8?srr@599@iS_-?mHL z_0aHaW9L1@u|VH$nRawh%k(I?%NhJHHnP{6YIkt(19s>b-k(wU=yDGUhoRbFNAOF~ z7}T*nol!YGq(C-`CFvL#Sc#-p>Qq;Z14HcRy`I{!)<=VAzpRY8yR??_e`%-H2Fx)rZYfqf~l2o{3_ei zIrcjD-k$x1QlI5@n_@5Q*btxWPas35PlHgk%1ph+|KC5lru}@gh5Hrzw(mDPz+^C% zJksMsg9|A4-87?`_Fs()bvPnP%=2KLava4j$R3(4q~m}^8*z!_9lz{%R#=R+;LJBT zvNerUCv0WyZf!;z-EH6LfR;N6qnbGf7$4Spp|{O6XOG+g-j;L{!NC+9d7=qEx)Xhd z6UJc4V7{B`oTwEF64S$bKX8LNkIpD{L?b55bR>ou-0g_4nk)QjO8yu0&4ja1hZQo; zv084uMtIbVRanZ+-(YhzBew|6QRf4 zR_Uw^Sk8&R1G6kP^S#q03NfwYj$%H#J3WR7q;vPF)Of+drP-&gjuyI^#ZdX6WfIt2 z^*-WQ`|3nr&_Z?s>aX%dhp|T(J^N=DI$cc{UL6?;pq(l^C8lUDnx!1AbZ}R57y1)n zd9fyo^@Urk31l}Ea`%BLpSQxDn$um#FG=Ni@$oINpqt}Z3y0IeBr0@A%e zOn(;Qp0BC&lUY42DrQ%hSA}}PDg(zQ z;m+*YjVF`&uS7ml+|DA6)Bld5g`meR5hn+7ei;g^u=rh;3g&4O-o>p9WkzH*m74f& zyHZbcFg+e0>2*^2<`G`OEB(uglc?qbb)Z2EJ=5 zeA)|tiv4#2Nxp*DC%)x=60etLKlS{P%Ow@@Ce^=kRfzYlDVuNO9d14eQ*;7Ul&rcx z?Em0E51Zfnl^-^pCM+Je6)7s|YB_TplQ+plF9Nhzds)I7LlO=mhu&{eE#}xo82o&} z5Q~Td@|ZT);zf*lbO8;1OgxTN*T z?YI}6RqZqX*BSGcX*@%e#9hBmsPt+=FaMSMHVHSO7cDBGr)yYjEM|5#=$pyn_O~0q z%RRXuIKtpz5_K!aj`oh*FluQb&IO%0avb#vqQWhQmhL`(tvFg|y{?yvG&gkX7Vp3V ztzb23T6)wYY-U2W3c8c*z<0$bea?Rx__n+l+l+gq(GvmH0DeH+)nc^sAe_@qvEc#0 zQzV-Ilk0_<%9!H=3zNTlqoo~6uQ^wsSSybGIwI!R;2OWYg}6!}e%)5Nv7&xlGgZgy zSK|fozpjldcfGlw(wpLFRhV~>a_!mkw_5G|leSUj)Aw-X?HkyJuRGz$6Ecq>JLbg$ z=k#EnDyaHyGkrE|SR~`p%HV-CWH>7A_)B%Slp>h}FzFORayf~O}g#9E0k0eD3vZ30=-o%d(9O{?X*bk_msdm35^@}Z$~`DpGB zaf$EE9g*Hx^buk)DlPpA&w}ZJn7=zj|1S6tjXCF!{(EkkA4DSO>ewFtgzv-qVS~AV zS=ucuj>JQA@jcRD3kq>5qhXpCd8wPDE>X z_Zw|-e&XqPnOkopvMdOmGoJ#d-Nv|9ej}}o8jYUIbZ%#K;1ldbQQ+(1lYhp4{hDNB zPt`O4-OMZvpK3Sa*EX8%v$-hf+Mx&RL282xa|a&1sU0|{4TTh=cF3y%IYi^~sVJDs zl@-Uwm&56VNB7*R)kPXGSn^OMx~zSYa<_lu41;YfsvyQb)hhJd>XUYSso7GLLhF!w zON=I-jSv*GLL~NHPbKl*n?AYNe}chX#r)dDv+6MWNkQJ z84QFcYH4mF(z3dFZ=$6Tu&ioBlNqEo6sn8n!0bXjLljx8)1p**mPHFuTp7>{fgpe3 
zG_szzlQEBk5aW{)yfxvu1*osAkH7jXcDYgy!=Z13C8_@@ul6*+n+e(85WYO`&;?jX zn`Ryqs`HO(f!{@MUKY^~{M0uG*Y`K?hVwR=V$ zN}Be#;mf5}@SDXkQXoQpW^$SzkAzt}Kj@+PIN3n@0{n8~Vi!lvx&=H*mjcH+3={bi z8Ud;`#EW++ztwS}{+?u~8`#L}))!oJQYOC)Nh>s#mNH7* zts_4En+R^om1ut?j*wjBGQ8|3>4xvkX`YX}1;^~ld12kK66w7E!LMi z)qu;=?ze3JXlH{+%N*z&jRVD`9=JTcz5ij4a3VvSfPs~MV?E0MF8 zXQJDUylMt0kYxAYy451b~ zW*kSou7;qh&affY?eJEJl;jMKkVZZcc7;aTebvJ>|Pu+dq|es z+tdK-)EKj~IyU^0SexV2!{8poRF209*_i>qH&F*@^P^r`ef?PE2{!6i{N4Mn0vC6*9}?K$YKa?q zt_7MGkPGGB;G!G=;*(G{`ZsNN7hEHF1^RqvP~j*)K5JS6oM{5z_H-51OT<$)MZid{ z$%;{(M9Y2&duH)g3cK9lm%E%(`C;Q}w0(6u;PxJ8nyV9!nRCS1W5Hh6Xq7NznNbHH z4T?cWl?;n37#mwy~vP=yCqH0yf6sEA|+d|!$3~Ts6O2o3Eg%RKK zXv>U1-{v5A?%ZpxtIG^XUJ2P3qEila>1XNr^TeUtM$pzv8k<}(vhmGqD@c>HWVK_oAq2C7U8f&uxQIhEKZcjlAt!@g>%(qnd-|L`{N@Syh`(U^G+#? zhKw%B`7m~A#tZabw{Dj|HL#uKfwdNqh{@mC^c>%d{4zHZx%(nlA`QVaf!=~ZM3c1qlW1VX4BZ91Q@Do(_F|?pGPjW!eM69nl`0kC#kg%-0WJ_{Lln+7U z6C&A{*00pdDk)o90spS>pU<$DB;aYBd0YF*s2FiuMYUVrF~8EhZ=HwqUFbJsqkMio zAtsj!v72Z`b z!ZBNg{d`DX<}!TWH(glfRlc?+yeH%Abthn)BvYPbZ4`->3x{Iq=8%ru6nu@v_)|9> z-tGy4D`bM;o*>4$#5jr(yjlF`Kayy6=mjk~gbe-^6+J>6JZiG?1cVB5cXm!P?Tn;6h+=v&UVFwRa|)$_x8X~=MR#yFN}j-0VES&g|D;gVlW zCt4C`Ia(rfaIjyu!Ua(0FSF}c^&+(iF*iUV4Zci4j&BGtFG9r-MW?{N8pWVTm)TiH z`TnC%-J@#UJPcgf9aEQHH`kWSZ^&bn4M!o)IYtD1utkYJ1em2`tRc`_k(4X?%dVE<@{d2N$pPiL)2-#tcFe6|K_8A zYbNRb3z4IqJiJ=oOmwip38zQ0*!+L<5R1&%jDmXITI2ZrsnQ~|{>b~Z-Io}(c}a!G zM4Y}Oz~R05bi1a@2t<)ROfxoF={#JO-sMhXa)w2(Rds9^A)5)S_ob3E9lgO%&kA0* zpO)Qma`vdyZQJ*Qze7rIY0P*^mgu6^TQL>Jl zE$rkmFAtg^#yCNo;>mkTR2J^5->vn6+xQF??$?Uxl|mvgL(4TAu4^ZM+4XezF_?(G zzMvRh3%a9Hf1S6nzIYB6ti}|WIEd`8RM_fSLUeUr^7N%P_=0=O2K&QYLwd>~X@Knz zN|fwxKHFpM>tjzHaJ9+;T57;zyk&#iC2K#a^Ad)Zt>y|cRf^kRA}#tn2Tup8QC46QWzZjVi-*P@rbKztBOOZF||8_zNKVC$mh?3j@dIrsHxX*2!B7s%2=0; zatY}&at``SdWO3oSOx1?pXy135YAN&2jEv3hz>xua;ghZG*G6{Kx~Mbf&D>e=W;Rh z&{K87x-APiH&Wavs=)D;Hhr8uJ<~c^A-Xh|qT40mh%-KpDJUK640IFb2&<{i-k4o} zEp8xOsM_9_8QWrT`H11YBnXJQ6XGZBMjx?Ek0i`E?r$Bx2039`gQGYq8y>dF(mQ90=^BCJ*xtL&_up3QwW*{0;ki0LAbgxx^?fZ*^Eil&1b?-ar 
zbosmcU(9jRl!|7KiNopK`6WkkylfP6gf5A8Uy+LrEq2cU!&3KK`PQ_)yVpD1R<=aS zRQlLc^0TRzill!Ue;zqGb+p%!+_;cM(fOBb-aRke{aqIe2(aw}G_SBIEUD)^yxw!M z02-%+lbnPvq6}M|fPKh?*Nj{G+_$4S^kN`Xg}}{qgb*~}cea;Y=TpS!J#=kE!D#*v z84d1q7h_PSeBJ!~G&$dhYTxK1TRdx|*5=3f!|pe-_M*4K$~cwnJQVR-^yYoejksE4 z3dJzrakxhOA|M5tb*)(>#Ys8nEmss5iY3I-EsuXYb5H;tlmAnK9y4eE4vu|AZZs(O z(zODs%n9r`fr^T|UglVRZ8$dIB#qc^PaJ<%MGA`#a z98L+f*#8a;pFlX!*nCrbn{6MrKLNnWwjbuMAkId%@RiR`pf_F!a#y)ZWr^D5AD3=Ap!c9K!$~C9rBR(+lNZsZYDbRecuLm?Ly=mt~O1^-BiPMIYdl#Z~+CJ54gj6fBYp#?Krr7SkR(3R@ zX1x=qZvrK_r@xJkSmy%M+wbE3UZDU42AV)$mr~oPaRuGl$im%660(F$91Eq3qs$lG zxh4ac71;V_1$nJuX~W}l^6mva7tY=Cm6d2@z7E(i0K6`7Q}7+xq5W|~KLK#cQaju|`)k|7V}adelJKRn?X)5bf}Z(=lal5y534s~`SCMVcp0KjClPc~UyDFqYBN;zAtd zrp@~9x>*uAuG=pmD%@&F@SH}R>Q=MxoQs|ORx_hr2&)`YK>q!nLsqmD`c|A9WCxi8 zOpOS)XsIrY>%GUm*ql#SLF8S&o(8nsvPLiGha`7ff}H22l0orn!Iv^dJ*recx6Xm7 zR69{3s?VDpA%0SP1%dFw+(4?t`xlyPIt_-Cu>8r2%@!l+~r zR={|-Ib^i_F_Gu1_1PFFmh+ZgKdUa)?683u?@xQZT&K zATvFxT_u#WU*1I8_bX6}8Vm2=pamr!lkw=+e9Zb_2aJlo zT>iatbO?b^j%Ny|>|<-HiW}0SR%lfKyy~@zB4${@S%siL<%xkhVeM zi~jtahL7j^(FjH>P2%>nC>I{}nUKPFBc#)QF?pxqbobo1mg-vJti*4BD zDyf|RP_vMf?kY3_$-R`=sQbk~2|i)v`qV?|=fXJTzhmuK)wkJGM}gMjY>8+=9k!9L zhGdK>TepncMpZyt28gguUlD}qU*bPRgJV0K{vr3kQHI=4=-XsW0l=g*@Sp}g>8_c= z-vhsp)&Qmhk=Pj{5FSnA*U5QzGEGk+0>UXhWpKd40}BUh5C6Y#C-9?>>m5It0%V6p zrxPWbNQgp#8$JrkI};)xH0g5xcwz*&m_kFlzQ(6c96S-?{jd%*`K^SBV>1{os`KG- zCs0`MlO+Yu(kl(xcN%-Hq&@A*upg|9@ZA4G92L)_1#EE?{4o3Hf(rej$mBmi`y035Wrn32sLlj}GH#IO-kkJv-s1!mICy1PFVnYZT6G+RvYx7QjMXbYkKa7PFJq z;LZKWg{Lfw+wX2dUIDxkB6Z|>M<(&Sr!E8EjFcc2$AndxQzQ;HH@%QCOtuia7 z^wm5n`5ve$-d}&Jhuf;2)Mog`-(Bb*8*G8vtS>%mxglS&(mpKZ2nBD_e4<h0==K z+l$vZ_{AM*BTq#M^lqNltMay6tYGOnrZ2pHSiR{9O+kd1>5we+d#4K)zGk?p42N{G zTKL{#WU~6E8|W!hfpHt6`1>>_1i)M+$ktfpu}}*G^VKqjvm+CgS#2_ld+pw7Dk8V~ zNKFQ=afguedaO&zY^sn-zUx=~7sNtwHUp#p&_HaqN9k zjxPlO1JxB?PfFwTc_d&ZQ#jvW6ZOhTlN3>uxHoAWs{OPmWbpz1J^imbQ3J{xtWZ53 z;%h0+MmdEy!q_wr^Rc=vK!(8YW-*<+kcR<_B{upCi|I|eNII|ljkPC1Rn}>!MZ5LJ z)j5%nnPRoOUV>xToV2iYItQ8faB#|)yoWVp_PI% 
zcC98g(n>!Z<~0`iWTV?u)C#3Fe>gH7Q#49_6z0%xmWY`xi!j%&EU<+RyD}S4B@`&vQE~sh3lm@=YXMpjFWBEzZmq`%WeLi^b<>wSW1ky zrH@tLrhcQh=3vt0DVY%_t!;oPF{3dGrFsJfbMnk8FKK)V0tWZesUr+A^1m+(E#VH(EPj+?v1m5)n}57BJFy=pzgcm zrbDjv5PzS!XeqY`X_nhQ z;VO1sgTcm#>-1jJps(oYaLwwnzLJV0!mHikAKv#hG4iTH&~beSyl7 z*kg{_up!7?5+OlY3q{w5uUBc;5rerSt0nJg=sCc<$->)IM$m7g{-E&ym@YoPJJ~3A zRcko96T8&t*>6F9P-7_2ZTd#cWj%jzhKG4z54PVDYFZh_NMHB_>n;%f#=NU2u$AG* zHaHq2miiC7$NI#aGaSjVJ|B{TEBs>vVio^~LH?fcEMw>T!}mSKI2@@f73%OvV;QM4 zxw|`)_fqRt;YhJz92=QTVpnh9)|lv+R=M7(XTFBvHiZ8Sa+J2*dUBD~7&sfqbs1W2hISHgbjJO71f(bwv{=nRcc!MF-W`NcED|+I! zh&03q)$YVxG#L1<@=ON2g3V>jv1N~)vVkvBY5O;grYFJsEG6>{bh?nxz8`_PZcjXy zgf_3C9l5VFvAG!7tcC1@gIxw-;X{WRENj>(Y>WBeg1SKzs2(NkMto{KY>@avqjYtc z8d2gY1zzTB7wqks$w7=T>}@Njo*HD8>hoWm4tjYxCUYNPn?e)L8Qj9TP5j5lT_8>P zR;J#iUf$AH>~OT+!`mzb(XDD_U z3ShX*{*&msiUNHbWqmz?%iN@x)3tU*nfp4f>fmhbGFp zUmE(%Q%{5LD4c(aX`Cs69buvs!qIann+2KUegwf*xdNMvX|$ zAuidwq9v8W|3$I((~zms7eBS^>i;6x!u_pcP*+%1*0t5M{lBHeH9DScgjEAU9E#nm zfrE0Rx*&N$AcJ8&Q#*%D46)ytE{RoTZiEWc8CYzzjHgSsi4$YleW8q1FndWA7J|G` z8s525NJuu4JT!6k9Ypj)d}VaWT+lupuZ1K>32aP#aoF(KZTeM!=(iqaM)mKGUpj}p~Kb>v+bDe-qtUSu6q}^<#>IA@}(&;GM zhR$f}o5Bm7=iBn5j*J*vSJ4|HXZ!DhQZ^Q9$zh+hG*M5IIOwh78@V08nru{z4}xt( zyEqPUDaj(kK>{;Lz<2qPAztm?E2@;UkJvUHgK4~&T;ow2>=;GCr96L^$_YO=az;`) z84{AVH?xKsJ7GA0_W?kBklZM^0>x^gl5A}AF`b~gLA`@{Y;BX*%ZU2@N3OsVk4tjBl zfaY~{6PEt8me?xtx7j>9u{+>e?M|UBg^Acb!2gyl2uWDiDbq8R`VE@XTBsI;Hr)QDGU2hSK6p8Rj0#%(_+$0!S%Ey5J4{ zHin( z*h@N%1s%Z+;eMC2HeCUO&;V#c!r;yaAMQ{H=0!W9JZ;9@P&o|l;3KVh2bpz1l@CaY!pD2t@y3WgIbO{9sx0#6z<38|D6f zg#5{>+OhJ}-wCTRlKtfsfCk!{6gc4=x#p*r99Mp(`#|mr`uQ&b+fZi@_=xN?CyS*m zVE5scuFJb(4({ca&UxQI*AS zC?u=vaoV527A%XimG=r3{IG;eV@p%lk%E`7^idECsU%B?U*6*l0ck1BIPGDscts`I zMYpCHY6S26$t2d99Jhz-Qr>C-5(W9YP3soULQ5RrmjnC1 za7t75&IJ;p5!_^YAjWCWp+{pQ>CBz+N9NuPBel7!dJwUaHYGGxPyf*YHfQO<^V{qK zt?}B9r4ygNj^a%-yz{bMw=%?Rsk)r+0a*G`4>bCVQy+6c6fr;3X$IsTHeuMD;ao0m zlX7<`hVRuTt-^=0HwwQ_j$ACKy$5D$d=Tt7ek^`DS7lqaTSwE!q}I4op3zoyAc56b 
zl%JM62mySnQwNHhk4X+lBc3!_w^oYwV$QGu8lk@M{2QCH{X=}Tj!#_5uAqXAaJUGjWb5t(2quPu6`r@l0a(ZPN?T+2uYL*2s+vTQm_g>O3^m}V6JMZs)$73f_>auj{khZ<}a2Jus&5mBAUvXTxp`Uy07aKl62GZ@(wayU*Gi zn^Q;}LdagUO&x!7>CRRiLP8;diH8k)MV1^Xxdd1Zl6%dT%{IZ6@z`PUZ24J$!y4bg z=*#p`>71Sc{BzzZ@O?Qk#u{B*bR;Aoote;ZR%nBAo>cW}ur1cyAy;Af1oV&0Tjw{xj4j}tHN_4Ubb zj+dfTvIOjc){B+WtZj$B)29Owi!~(mWVtHDd%n)>?@IeEYf_WLTurT%eue+-33<@J zxgXun*R`a4ws?S%`io%b+?vKCZjAfRFtA$0KG3n=a5#sOnB=P!$haI`DnW%~h}mNR zFVEIwfh@U(X#3-%Ow{Rrweelfn+r|8{6%gX+7qN?36 zV3ZXrqxXDwxpx`_w9p5AR9V-HNi*A)`Yc>c?n6bw_mxi|cS3C={JZogU4KbYs>DKx z6rNmpuepOPKPfSTc&|4w*(5hB6RiEkfS$ea7KB2wKb(D!OL6z0{fLe-RycOwRw9_4 z47cm9GqP=*qg$Gm>-kJJ-%5$bX1p8q&BCxwQ+-PHOU7HE&qKYY74*2_AAXlTaFRk- z=fB`um9;c8wmw)yf1RTu?kL3+PV&PX^uo;ufFqDP8jB!2 z2#)qJD$K4gM;py|`n_JwkOakwwGV@3U0p|bKxY@iyS17F!tHt}<1JZd202!}An~;d-u5qEm@~jLubVpez{Wj`7>kd8u_KXV1Ia!Hdm7mavXmAfi}Ku+YdFylR+vX zE-%+WHqabOEW8m`$gKi#)*)fbnwHNk=8yJA(kbm1D&6G@%DiPis?Me^ z(kFq3jPLwz_ONU5;p$1}*jlAj%vbdniN!3!HFI9t{JT)W)Yi-v<6Ny&=+K<%dTsc# zXWpypadbzRr1LYMd~Fl`#%j}#D^6B;@y2Wwnh3wUJUsG~tCJS#32=Sv8pT6O|G?W@ z#8GnPf~308ZwBizYcQfK-e(Jzq?Ue6t0XK6{v zMyW}nBz|Z%^b;jryd)A~OW2~qY-dzpzTH{q6QzDgEaTk90U2T{M_?OSsia{j!6|3o z&@IWRO`O-wc8?F}VWFc6Q0Sibjv+ylcf@V&fhh;D ztvJ6Kny=b4BwIBM2JprnsR9FnfRspgH_U)^cb9Z`-SfQfz3YDA18WT*SS-#y`@i>ZV<~1T z+GFe>pWT~jdOEMXP~OH%opFm0HCOd0|m2R(4quc@R$s4Yu^) zs?zRxyf34dd#`6Zm^hg-*{@7wOH4Jv?e7eY{Hj-2yuG_rT4ub#!(y_WUuGPci!W+p z!GC_JXJDD zZlf=r6OlJ$KEqczF|7RUI6YjQ0ZQ$#OUO#2>dBjW&3#TA;kD8GW z@`k6Fx^lFFuO+E7E8=fDWYWuDf}M)})Dt)ArP4eL(H;ElOJ=cmw7vbme2@WEFoqrb zZBtT;Lq}etut$-RVNcxB8MVlgDU67MGSn6g{*>OHT4uaW7T-vjku(AGSV|7?br;d3 zoh`yzdmoz;7+x@*uvUx}NBLDuXgEBZMU;iFEfW;u$G-MHjvz^FXjUYjzSFS=EtW~{ zA7Nbxe`Xuplt#9r_+{!0?h9*U>a(fOMGKar6mvZ?SUcz89O~$hWHalxF)qbgB;n<~Bt@iC&vC2%}f(ro%fKeDggN+pk z<^%Ju@zcUT!R(f+%Lg2KU8g=t+*5gcEbRB=<={bQz=krt4z64gk^^hnHi*ZBedfJr z`g@#C1g=cj7PMxw?^>7fxkX>EatTI@dn6;(HfwwGRdp~sC+N|o#x3>}g?@u=^uGy( z7!R5s&=Xtsn7}ml6pN$Nu6(MC{-szs6OS&6Yq$$LYW6 zRh;mCEe}mbK!MFJ#5|&jd{M|E)v4r)yh3NEVjA5t{ZzCbFE;MSY`ANI85;b`1Rb7! 
z>$FpjY>7ks!z)b;rVq=WkDT%yeyS?N#i+bWx)PSDLsT5w^dYxTHy)l%*{n6jtMU!p zZ&O|Cc#u}pCt$^JTMQ;nP<9wxhy0~kKwqTHsp0;eA=Z}EieYUYdvRO(INn-)wRChI zH<*PNTm4@KeQs+gLQOB!aJ%<$VI}ZQ&zd)vEKiWIsK<8f_Bb=B#_MEN)^VXum7%oM zYk&HYwj@)`V;exkJ=~m@cgzzpy|eiHoqcchQ3x<~+9vpwlR3~76%i}A(QQh&?TbKM z-C;Y++|fXp_2dWqQRr``ga|e%TlS0eecwl^=ZVmJ{)Eq$d9GQEOiW`9ZW}=y$NY#b zI+BWs+Klht^G1OT`$HGKInb~Wi{7Q9t3SnE7CS1>`DsYtyOY-xnx=m<23^_UU3s>U z)3dp4R5a}@O5pcivGO#%2}Iw_|3`czcpZ)Wkz@bOkov=sT?Uq#R_bdUc}wa0y# zvV3p>km`$Pew>Rsf$^o?#weED>&zUIC6(RA20a*G?LYq@Yl8>fpuB9C2A^7He?E6~ z{a?@Od4gFwD#zDrH8-Jz_gsH~U?m8n9jaIv&@ba?E=hQ-;@&;n80PA}FPSmh3Szx3 z?^wQ^sQk42g6rof8p-`sq|L~OS-xr!e5dp;r2~qP^982L{UvtMmVKv|`p-nb%3z4D zz#ZhU1U!2_%$2d+jGroH(J)Owc;JmxsCLkKRjci0@n}M`a*8iC+e*%waL^GIRc|p; z@OJj<$p2vRp>7gdv36KwAf7L6o$xRAbFQDoT7wkqc#e7^>QhGzgA>2nXbv(f$LGZsCu?FfYQ@N z7uhtOl$=a96%G^wzX$LlKC zff*Xf)@Rn{)Xswt-O7(2nOX#Ll7BO&#Fqik{NjEO{hm!QnL=`fkCyy_!5jwtL0ZT; zeR+t;dEL*I{EC~i-Ju3odlachi-@ju^?2@~$O&1C*=JxDB!d8q;xRh~vtx-8{*RsEl z=jpwgFXU;9_8TtWa_f6f9!#RtieJfG0^zHie9jLdhrz^g^Ep14egQuBighD!Pg7Czj3DMq4cO@q) zHNxT1wa?b*{nMvkMtt_YEVzaFFQfUG7Mn(%C^k7y3~~hYWfm0YOjL{^AvdgR9eG%- zPd&;_7AS%Rrux#73#hxHzpAs6w|p~#9qR$y1bI?9T>>{bG%&@mNfm}e@hLk*>%(&U zWVb!SJv3QKxsP5HGNNhpH4gA=s;`sKuSjvk8Y5hzIZ@d z@pTnaCY6?9W*^*bz`sqFViRWhes{#1t&h2*hPD=1+O^t}k~b~pG?Zs`316ARJ9)#4 zI6yo(rXo35@pS3P86`zR_2xzazDz~x+K+2?1>u)zjj>qjR;nh&TYSUV(8uA4PR;;! 
zFRB45tMY&3!r+`O*rTWVKdEJA2aDky`GzAuovV|IBi04<_te|m_!m;KFEz6dykE#x z9+WOuPdk_l`U&FJBKsplK1-D)jM&LJPYktEx9!yZeF9k=aQkhZ&-t1>dA`B#yMBNP zDdO^(`Qk$B=zHcS{GF?>4@O$s0)3c|uJ|o*B=w_0Ms5b0Q<29>${)nEzZipDxCn z{7=7wbFovgryw@U{5aEL83jL>CuVs%%o0tnvzvvaYhZq)+MNMdE1KEVhD?NyP!T>3mfgrup2t zL&Fc^$jw<9g?_O$L0WjynmgC|i5RA1H|5uPY@7i3r5?}6*Peo%JRX<>0Yy{XB`33+ zk}M}y@oS^m1DFJDB^siy4`Q%Nxgld{L;RCPgP-#fB0EwVVX3f3tSr^#k0@zQQT!qN z9`xrhKBI;HfQX(mI;F2WEU#=t_wr^5dwNk{N_OFdcv9RGQJKsAmQ0O2ZtHyL)890h zW*DKMpp8Xbe>=SZr#HpyzSb)Q)Dywin!vM{%qZkP#03&6IXDIDWGX+DvwjTEXL(h} zAkeL~^7Y!pQPP84;KDOJ)KU%-nRSJaAK|?UGp z)#uW14CY6wOhSUYFComWP#_D2C8IEK@stw{>WalSKmK(3E%dP{kr36S8NHnlc~fRU z*Kv#I>T(F^1jwq^ms6Z$Q+N2Cj{oR%)w2WHDH|^J3iGu&eu0eZ`X_G;yy1=L8h^J3 zH7oD3EbB%nE_;h$LBEmWTFb1bl9Lu-l9Opv)g3R@XS8@{e})PiLAeO+;_>*JkIjaSqn5*%Xun(#WszAow=y@FBb9zaU}*4d^oGvwg86jjBNcbmc#GhFf?? zST1oWkIB3Rj*T70`MKOwuLMYz+wfx#1I9dP5k?dVhmRJj6%JGLW<85N<&n9M({g0-4hT@#hw$Yu< z>OW<&z-qnjF6{ho)}qQ~<{$5`+mUUHbc^*`k!>lsjq)r$&DYo&0CcIy?u%6NvLwiS zPJ+X9Nm1EU35Q8Tg`AQxD$HVq2QMx;u|9Of!n7{YA=H<)fW^dl@ zk*xBZGRmgAnWl{MtpN-X5s`oue#dL+a}2C!UW%s&IT>Xepfi=(u3eX!rCf*NWv3G^DM>EYgHXx<&Oh)e@Y8Sc$c97OgWHd6 zTpQ55xm#%8GcX~b9T?PCxvpGVjvP)`{Je_+YC9s~lJX%_Y+)oyxzOk~A`!Mp4U)a( z?x^&)$@!?19rWz@sQ2|)BbGj^)ba6nB2TF!6m9NK->w&XwFhf;!KIY<^gr?SOeVF1 zZ`n=b-6GH^FsRyr){IGTvjNlXv-v<)e{TI_2%8oVZ_rikmT%UW+W z9oT$gqnWD$AwHFM-Am}NZH^uK1U>s6%(aj%ESp9T74KLM$*GrQ@PlyRP2lO#I$$^% zO4AHEn*9lrs(E9L?+`Y}G!KSnlLL@$9Hyneo!WZ<=p5pjiVa#f(#-4v$fAfEA}4;Z z1xf6YDS>?Rgsw^VFszRrMp?x>gp)~?G zkSN8|Q?rdp-k^p|g-81M`M==;X=6-8P4;}372>nA5K^Jl(!ZMoUJ}k=v*qY4SLG34 z(*X;M)UTlBR!R6}StvV-17toseY~EC+*dd?a0-Amumi9LtPye?)pN_?&e`sN?2$zy3r2Jpf{O z?8Q!0?-PprEd@r}*f<`7-~YD|J!~gh1;LJYq;P^@?y-y3czw<97wt=&TquB)+YU8P z@CP1|HpGGAwp*jJ!Sx}(H|7ay#joW}XP#`Q9Wae{y$E!iHe$|(#Itoa{sXCVe-O5f zBfd1w@oAm@CG5^KvsbSfRjE8=?fPdKHGYb^JI6@Ah`O^89qh&=s#ipS`0KT%Q7Md_ zTpl8~^`}!-r^1m8*aAelRsJsD`E^SpTSsh@(~!Ionksw5n4h?iFM1|UAsd*=$o5YQ zbeowuwe&+VuzhG_HP|0lmK$-4J$D=9vCfSL^oPO`&mEeM(9iL4n6W_8>M68OcN9sC 
zYs=mBof)fu$Xbm&Sdo<7Ik?heOwN^K9X0hA51VlrYEw<>2;x_z==15jh!~a??#v?8 z4%A*;X{U(vy4CW-*Vd&k4eUw2rk6p)jqtei`h}>0a{GWrAAE`9SMJTPkT%@ilC|$x zjJr3j4LA&*ha9tc)l(gokrl#g>1ounCN{$mv`W;R@O<~U+r%)@qg(Ei!mz6c>l^b$ zYm?psPNB|XT6?1J0#coi(M`F9`vkLUQ83igr7ER$E?gfLv6Tdumz zx6e{P1*TeY4;pjb7cleoi2FzJ_VjKTLs9!<5e=M|ZbQ}=h(6!5 zYx;Egm)VUqkkCgKdVf9Hl%*QqGV4;6e)SfeiJ678U$gUl8&U9mKfjNU1d+Ga#>oMa zhmTa(H^utX6Il+rCFM>5vA|oH#w!0$srQHh0A{}4tDTqM!zlC=*|p+l2Pi0BJn!SY zRXuyjarVX_WB4mRHPu*!UxWP`lLW_k>zzxUjj6#kc_2Ki0OjeE%{5v{UmyLuUZ%Tu ziCt6wPGowPamQ$S-3GN)UeW~6@{5!+$jY0njA{V4<pOG?ZXQ&EaYi8# zPm>0Mamz3_>BGp2W&npR*{@0TMD^8Zt+MHJ+`vp^9d=}Q3-|5s37AY;b}~ctE;$Py zi-Pdz^`FT2f_u@uJ2C|lkn!Q{PlrjuDR*g;Z7hf?VF{1bhMsUD>|_?-OVztF`55N# z%fm2Uj4s5@T~OX7s^9PJ&o|;SX6-Txig#;9|Ey3^P2_@~nNdkB2gGw-ZrIr3V;UIC zQDj@C*CC->kKKe@MehtlPUXd8DO1urM3rRS(D{amQ|8g(!nlEGbQ4M{G9?|e|D1g@ zB9#)9qvij_{lFY0oLoD>!JE*SY@6wF75Ya7bObwiU*UFs8eZ4I?1(g^|Dq`L zeV1kAFP6lb;QXl(J-C;g0S5|*Iy#L3S1IGn@}{HrwV>wy zV}LHDhheX$TWWL(#J5&qWTRTr$={}B13MoTgU{PJ%!3|ln?9`w9=ULb_5rOzBZ23t zvu*-uHo=`IqWF2ne@Bar9+8;h7`UKBhOP-L5|$Q5xhX(~Sx0v~E_tu$^0x?dYo5Qk z^j&lKfFX?d_;2rB9953*<=&m|sgTcvz_ZC*Q0>pldG*$9$NhtY5*|xBfkHRnXh}xb zUi2v4^ivo0&c95^Vfx=rRFF!dX<$H(HVmBMozWQoQ#n3&;DHI$uoo^4SWh0r4$`;^ z4@)~YPpn;%=gmjFE057Y*8UI4=i^&fM=`Qc?kuzJqv|zLPcbdA-Ed6nN!&=G;+2Z-1#Qe|3Z&;aF8LTc63A%^}Pp-((z>^2+huv~%Tb`|h zGhQBH(UhRVijV!gCS@ZU$nt)Osr5HZyMkMT$NEB;|Voad{Vs?w8Yood({m zwoZL>eQ5OthMOf1e7t#t+XZl%MR>ZJmEn+c;0fgSk-B9#D467aW_=N(+3mJVakkAF zeR`NKI*fXIZFRaAcwDnz9UDXfgeS;C`e&bOz!H;k0HvtxL76xFz(*%n*bz1a4pCxE z5|BIBZEoi;Y28K;_$WDSww?}t`6oVUi4z>_#~r(`O%T@forJ%~17#32h2{Zo1h21> zgT|_b(VO*%bm61UQ+zFo+_<|&Xpa&p@3QK8^9wi|$P4fsV$zs?Sd4e1!(Lb51CaWh z_*GqPqqRe~ODh8V;%(ew4nHmO4avV2{#hSj;ted;xWx%dD1ZLeR%z*zOm$T_Z;jT= zYhLW@;C45qM%ZNiH<27u(k^U`Uyd`1KYD&53AC=RkJmnLE^S2Q&_l*Se>54dSfiw?-Q}gus2gsfxOX~sSH*b`2ZU)7cRIe zj!|LI)T9-RSsPqE#9W;?U2J zq&cTKw>K6mjxUB6#`8WYy0->UpKdbKNB0TC8<$_S%`+We~DQK&f( zp}B0jk{?oQ6~guE{Y3YBx>6*)nLzf3RG4OUCSZX%MlgE)=&XFlB&y@|qR<|1)uc6N 
zGL96Vgt&Rq5gB2Rr&-}{O8Q%eI)Y;+1d`n3`bW!LN1ef~^Gryy!bE73SDW&)GBIJQ z{`-tV1G`NQ6XeEB=$9DALNFbHrQuZuFv#+U>5IY-rtRv z9s}aoO`;;ZZ7EGp+#bh>p44q!teoB;bD?wN^Mv+cD~*^86BGS2y3&PQ5{_MMxki-YTGKI~46uKT+H8cfYP)E^@X)HowXx9sJYmBjP{e5f{E- z?F8>tA#A1ZtWu}4Hrjp_J>6Et`vi0i4GSZGrQDm~$a|8Y-k*K`xD@!BZmlxw0Gd@Z z^SC*`+4zV#drQ+6rGQ~YmoyU?vA4&f$6D{kc>i+&Jxk!nnsVk+?2B#Ylz=9VEEhbb zRu+L*m%p6u-L^@)gOwL>jD@bNpKoa~C0eiv?z}&ffsSvnqE1%=ACSzYf-GLRjjKw& zhj!*loHitr#``G|0cM!lFiBFnE3J~IY(pmKi}A0#-xjZya3@En2*x+uc$vvZ=&2M> z0i435*n5kI48q&f0RPKg612bOQUTF6BlL{SUv48@YBJnGzcV$*6u*jG?pn^_!s25FAag{`E zXHVHb?wZhIAFb#E2pK#gQBCDER+&vwZ=ECkikn4+zuxb~{iD`TBBl?`6HS^-IAUJi z?f$M}*fHBD1CdAof?J1@aCAayppj8`Fgi(-duyOV@p_*V72zJ& zG0Pde<1QT?zfR~ydT)l4+*n@pEiHciMdQ0Det9hc+*M< z6r{SburYQkm_fp>vytuSF9g(2MNr|m5nP8pan1m-Mef~%6qGbpi(I8PZ#6kvtLKR~ zYv6j60*>{7-B9$n0G+1!_8a%lx64Wb)~xq2ryFHTP8`jj;v{Azna3E%UI) zpU+SPKs?cl+siV~{}j*0LK0HNqR`^4cpcr$YclJSKatZ+yB!Ky$(W-++x?YB9Xo+L z^_2l?hr46xm|1hcN=;<~cOTa^Uf#~KTPErCbm)Qa(1BOM?9bO~&R1K04AdY1;&8U2 zj|r{X&SN%c2{I3)tA26#<0UuQ%V%LLwAu#|7S7-K#VKkF(vQ39(<<#HqlZ2>Ctx36 zUwbtpuLv*!?wAeS(|?m;jya4 zQ9ZdVz3jho_Q5xj)`Ot$+{SuHACnm0Y7}YZ>-Rj~vA-WU8KR@3D-WIrz@u%*c4_K9 zK+>LS{$_^s$+*LH#9!SiF1$-x4+j9on|{kmE;CQfdL$txq^B<*J^%Aivd$#UblQrL zeqr&r*-!GYMi8VCaDP}YEiw7pbeP+?&VR$yQidN#&JkaME3WFw;_yEf5WF0aexSVm z84z}%$eZ+Zqr8~Bp~(9jTxcFx%uu?E9=PKZES|PYRRAcl6@!O0^%X;=w92Qe`e&VJ zn&c-`n^2M|CI3MQbc(Vpkb;C#-`NlN;-o-*1>#4W#zZdM)u1|2Ks< zx7A)u2r?J*c%l48!cy;f-az;dGXPqTW!{(vBvk=Sx{8@R))7iJ@rKXF5krN35qBXa zwl@T?y27tlG-JM+n4W$qymcWJjDX+nahTLQGNz1xV&1r@hg_%S-W*&Y(aZ`G6cRXh z<&VUJ&;NWe5OH{k?_w1G_=Cc7XP(la-jimU>$Jyrx7vT3Z;st1#-U@yS6kDp^&P+mfX0KT7NxjksATKpZ*cGPUO2Q07GpwXflxizIQ zV+j1W8fd1RA@WmcWJo4Qb`VD6N~2_?XK_rYbMjby0M>=$R(K$-GQ{K4M$O@bA7CqFTmn$tQRjkN8FbHPRQM;I83;P9DZH= zYUXc8wKtk3b6UONU5$50lx?;#J5agAf=F^$%o8GSi-ULrqP)0^4q5&-sp)yZm612< zL0`o&KIwA>%MO{0wyJ_X9ZW&a-2VgKYO42?E7$X+Xnt=Bq8kJQ>&=o1OUBAS$cNEE ze%8p4`bv+{B8VxV>VnncaB?6_tb(OxFD^NEWY3LmM{CnadzLDqH-m zi1D#;rly$_M+YQbpl4&p$qNi zMuUaxY4e|@|A!g=5pxWB~ 
zy5Jco)F{(0*XYO3-?M)(KGgn;Idf{xbsKrIovao4E8-Nk?)9T9-$BU1v= z$ahw2O;R>zLwg(%D?5`)DjI+v@HZ~wwcfD*=R~MjB33NUcrTZSL{r~l_|!1l>gMmY zLL3S&g-YUhR~{1?9gRR<-WZ-kYRf6$h5ZuS1q)hkh|MtiijR!{l{;(&?1e*}{ozDO z)E%f;7!_ZRLF-^YID?#K5 zmO#c5!3yN1R^-az>vSGvMm9t?243m?Srk~f3Va6Z#&^zX8Mnpi+-W`Dn4ueKqo0f~ zIZXd%sOBajci^!eVrntZ;rF1uH=AXcFZ$IDJ4qfX$oVNoKGWgfn{ntpeTR-xK==gV|Q6)J+yvQ zT|N`<-_Kv4=RFJ?nJZ2IF`F9v!{O1Nz<(IzUHLrcbnUl?|aPq(PrZ7SC0&51?e0&ow)7JPg}$aaecrD<|j zugvTj>7-i|jHANK^?Y;xNw1!_5r2FV#GoV)XEqs@ z2dlRA-zP6nQkn2xP4icU=UF?0Bfk`!i-sH-8cK9~t|-NHeSd;VDvXHpv!;&Sg^wHe`=aVu}Lc| zCE6qjjqB1-?{ePr`Z12s@cI!4gh5P&I^KxU)K56Cjs+k?Lcx&*!ojdrpoX|$HJAZs zNjME_K7B_{2pCx*5b9SGA&y4~Yb;ILgy#D7r0=BA=)U*gQfHCj%QIbh*7zGgFtwIs zMlUhFC@@UhbIKs*UF+EzCYl;F7ubBmFM6nq#6_#EC8R^mw%PEJx(y}_OMX=w*srWQ zV1FSahfZ1nU{cq?hhE047nDEZEzQk~0t757vB;d{G&oOc8BX?HXZ5B}CiRR_paMXu z6W;2%^+(qR>+R4(A9g^42}5?|D&g~81U~xKXZtjFJ)se zp!S8tOy|Au%leO90Y6%|eew97s-JeO0dMWOoV2UCBr&DYsv6mdgNNscOlBYv5pIi{ z1|ykSSfCzA<<$qhWo7diwkA6teIm`v2UOZM_HSmBL2P@F4D^b6z z5}|WmPR|Fu5(6R^Wb^jgeZKAYiEP(pXzY1ojaGx(ngJMb zp3`mkc8$-T&9AwHWOvKi!}C#}5%IL$Z2|X!Ew8(|Y{<-ugHY|9GzP92WlOo^Z%@{` zao_k}{1ZV*@-KG}IvfEK=oL?E^Iu3&~R z`QNm2pI?)683ZeRV_~n^3b?;ymfYTdN|000 zM(xDqRE-h`OPG$*`=7|dcQPvOzo|?08{n+y_d+T3vsGnOM1(WP7k^w_c+!ZuqaL@M z53dY$S4_!A;*M4P3*VXBst3S>5D6X6l`VS9M}+MAwr8}!@7HG|><@;~8;^*fryy*1 zcDi$_-g8#-Kqv}@i+!K{HWOX*8ZM7w-bOQhV9h)Jw}`K8|1r$`Z=<9)=JZ$AZ)bvf zW`Z7y%^){+c3(}OByh}w&5`4`{Ql8LydV011p8z5m7r^V6UkZ(9C%IdWSbD5jVsC^ z8(2A`k zF@s7A!DT~1^x^^ncuuW{+v&G!*MkoG1H(78tk)&oa|G8G-Rqh@-qSB=@~eFYh#d{Z z&^WE}i1FZijZBpve?_^G5I99qqI@(bK=N+?v8 z8H#v5S7mXP9)-1H}Z~8=%)B)b&tF_650-xp&#tjRs(4V zvb}-6p^}KxJuzsdu2=Y{Ox zXA^vFbUj@INe_hp6)GJ(k~@C?>IRsdUnn{WOw>U^-YUTlIj!(mz;lbPdiPI$L^$r0f(i)7A zCW{hrhU_=rCXm17%l>ZzTOUNnO*>kcjB_{O6NVMroYLFylWa?F?>pMX!ybELjZc{|=vRX(3QM$co_1YQxFGk#OC55H`7U6yW(e z$2K24@|HpkDzy}RUO2t>@I|P>$DLM6x|00AHYGhRti0o9GpYAR!Sn8)Rz}J`Dx4L6 zxI60JdhO9vz||8W=tV*QZDWIR8u3Y0zqQ?Xxw_eU;Mn+_->%c-;w&R{I!rM)Fd^jk 
zSR2Kkx`TM^p{8G+=RCj1+a&gy8*}Y{46|vrQ^ri%qwefou4$Fhnf#d!)1GQ#)5*s_ z)BhbIY30DEU-YL>wga1672 zk}ch4(%X`Y&+w$(sS9>v5oF;Sb|v3oU>k;-+@3Mkhps>Lu|swS&1FpCpJ_7`hWj>F zqa|N^2O1{iRx`zrdA~a)R%6quA^e-d6LVF%Z@Ie&7QG)6N2|k?Ru!qlVhxZ1&RZ65 zad}mV3W)y@mr->z8J8KJ+??@<0lgj+%xkf5K0A7jojN~QV=x(LKqyijzd=R+NG;7z z9XNj(=pAZV(6Al9Y4C7MirJA3B3rHTKYGzl zEES*V!!=|EGkX1b(#(&EHmP=a&z~Y=%1#$Udp$`=Nbe0SG5_hG5uRs=#lA%vgm5u( z;a^x%9~C0OO`@>eoH9Cnol%^-z+%Dq%s#GzQwEkjr_@c7zwuGtXDe%UO%xxCMDhWBVLi<$aZ*LqUhAe|2U@dzo*BRfPaq{C{e`WgE{>zH@R}$Sw z3%nEfz@-03r3j@~dQMi_Olo%8b?9becM`ET-f7a)`{Z;4z`wB3dGRaXd=gI@C{QY* zzT{X`U#sX>bbMKg1V`T{1_^l9Y!m8PIIi-pmAN5PFEzY_?dMhiv_ey_;@0|LF8&<57cV&MF4T)@OWj(TO^$ybecq}<1|KH-VSfkqOn{#Bp zCUOp;d9yNN+dolXDjoGMU+CUsa2$6=d9R;rf{E#*RR)8*J(I`54=VE7K;#M?&O%d* z1gEl3&z^NBkS2Xom5Ffomw1D()IS!j9{ow zjvI72XF`@04O^O*Gng^0l#${$;;@<7fdn2f4izUU3!!lEtl|*>%U|q4?Lmd_iPjyB z)gI9H2+zFE=tb=r{Bn@H?zPR1ADY!C5_}tjLQ3ujB=c|VTucfku0ODF=~+BzXT!8) zGIYm(2G1h*UWs~=nd@k%>{Nw}KO_KeUYz3e_a<5*=UbfLk$UwN+S<+nn>+a?zhJWq zWV-^XPSFjj5hn{r6eSb}ybi(Fgp(#Bu6pxx40(qRl&@Wr56QC6GBmUnmTEh!3or#P zVH@py0Rq7V$;xTYhC`hWD6_FAQwQCj54_G7Wu{{N45xe39|abBn|PYlr`<}#4)|+b zSorD|c8y5!7p@y!Xhxb4I~)ARLF@q^kK=G(0sVV%mYw>Uj<#(fzVa{8e1;7C35WX` zsrQyBkR!;p??Hgbz3pLsU42O9l=}7*4SWuN0JO z)8SuCZb~BHk^Ne|!{17-$l^h-{2~OGjuO}HijsIB%^iJcg{uVFyL&oXn6;#n@ zGDh2t^QZK)DSh)IrW@6br|+0MRdPlY6scL^HLB@?njmGwMLD~0u#K&Q#3Fz1OlHSd z;4+(8UM@cm{NP{b=U(p}x(&sxpLt2!2yVsv?=FgvcYcVt?9t259<3fO)E6tKf%9%} zS9d~&t&!)fhtlnytiC)Rb+jj#x_8spXu+6mt+it`G5C$>u$>A3V4|5#v1NETa zvpCZ>V8I#t^-HaQaTu`S%hqMqMn4yyb!=E|4F*sfHy0D?UQ+SBuCsOtjz)`R69f>1aLJY$1;|o` zY4xsjzOpFEnxc;q9_}uy^*A}QROd=tq=vFw^sJvQ@!O!vYlt8&kg8Y`&_N5W;w(Os zh@R!kwYg$)pf32*2{?mRq`*eY>EPEzVP68M``nmfKb1%1m<~V^9mD|gJo%$|O7Ez% zkRa*hXc_KXQ=+gYosM8NvQ0BFyoJSLFQN-OW72^6hU}mPOVF_4pKN_o*Nn~1C zgl6jpAo{-!6V9d~k1L6vBB8eI4UdG0)o`%j{SM!icK~nqp~Iv~=zx4$tkNUkjdf7L zg!1AiYx$<-w?A>l{dCG0E7fIasP|4fUw>T?yO4|f?z-f6hfjrrlFC0Fa@OL;kD{@Z zUwsuDP5v$LNsEhcel+RUdk;_^BFB$<6DLdGI$`B+r74&ii{~f{*m$}*;Lf<=n7M(x 
zy_~|!N@E=2H-=TAzYI&|Y&FDqji&z+TZ>q@9Us`FthHfPZI% zzTvBrIFpOaa?PHwi&EU@m(P+C?;KrAVhlC180vw~3Knu-l^um`G_%;<*d13J3rZJV z??|{R#bZI^ieVi-xk&J=5<~=vgWKB1RD(ajRhOEJ9*HmS!0K9;hq~^a5CbtbtYMk} zYMbRC>MR<==mdg6O$&d=YZWgNdJ@_dAri#3hI^Y$x~+KpL0X)t@eZW3>K4T3Mm3K| z35B;t{i<^?*Kt^)O-=_kfhKrWx)4`2 zlvn{8c*=mNA`eOAUFqWYnw(q%_=qamD$VVtkyILQRt7}{%~1UGBQo2|o#BW~&0pca z^GG=K*viD9se`m3>Iv<+FH_g@4{3U^W8VT~q8GX$Y_t`A3ZDO{D7{amK!{T_z7;pv z1v3}^x2G8^l`absXQR7OZcYfgPg=!52qW5%J8yXQ4Wn#j(l%XN1ugF5;PtOa4RRu4 zo6Qv~Rog!&$*w8jA6UP!9I>1mc{Ubn?|;w?p>~*v$O4xqfstKgd`%55KL)2Muae;T z@T@Bw)%pKUco`eMv$BxUSB3OyXVoVEY5S zATK*sgo(V7f5Rlh`xyMMm9TvnzOlI>@Yh6^y&r2Soo3#x%{jlDpJM2mSYgV_T%K+{@Fta zgF4ppck2KsP{NtlWi-?l2dpFXY$Fu0le2`c6p3P@Y+*;nSUFk~0jdUE_awVawTrB_ z3E!@&ya|q8{SLdhoiO$}>vJcT{?J*CC_1B4*0vag#JrgML@^QP2)GBVrzlC{P%D*b zPa*`fLie#RNa_l^>qtGI@-~-q*1e0`PyK!^Pao4?u8U9sFlBRox9-xBqnRYgXoUAW z4318=1Oi7(B4GQf7aNL#%U1v~$n3`c^4T9x$`e;x?hA`rIs~w}K92zXj$<_8MfDH$L#B9ST*Zr4S z4r+ld@ORb{q1MP7%9Lg6#moAu$Gpdt)HAN?W*EpaT|Fvn8M^2oYC^l!JTq-d%v)

b8xhfCP^9rxD2=0M2hrf%Y-<3-H1m<_gF|Sl15$;-`F{y8H*yOeUq-@|2 zCCL*0-_cNr=A4(?gsJ-<@xg0Jx9LO#uJ#Wysz-9hXlBv7{D^&jpjs)7>vBI5jSruZ zMyv3o-=1w{HSFWjjN649~~M!Rch-Nc12mCHJ&Bot$%N;2HNXRnqLYD z>8^=OM|q-6^ec;0WS*y$xg)D>%LY!|AM8XYen0L63Y-Q4&rMy|JweGUsZN;wE2w2o zGAf1qS#iLre)Q?#e1wRWwsdVUNNsKB>bm{;am3xlBjMY(ZVnRH%E(vt6-2Sgrv{Omla{ds9F* z{!-4?I^!>Y^8@1gJ@G;l6;`{VG)pa{^hai4xeNLhQ?XAz-k*XFENu7Caw{G0UJTN1 z#8Ao`LFPNGfqWA~0RU0$51{I*wsDF*(!|^Fo9TYlD6F}6n>%kYw^;@gsuE(kD92Dw zFaj806h8x~Y@gOT%eJOW7YhT}eQLNvWKA(44qyZQ4ZebXAB&w290_&IW96n;K-UtT z39xKN0Sz&hO9hy2vAbdRmX_^*V6T4*zq{awB6qY|1b1UTwFjY_se%uo2BnTf1je1^dQ8au%i{B5JFZt zUd>Q9xEp(|hu86CY2xHok<;z6dg$cVAH$!N{5X6@Y=cKHs~Vf0GOE_Txrj*#wlnPs*cawfw;HX=nklMb3Eqy+&-j3yYdEyzV8;p z4^J>KXocorq4ovm%pl?)rNG!{7%P<@uHs=CibgBInGo`lZ$zin$WdaQJQyYMlDJgZ zB0tn@F%xK*w9n%J8de{sonqFtB=n1+p=1t>=5zLdI*Mt6dNh64)5z$~0lAZM#4N`5 zCMtd}&c<=y^yo1DjJ;aAh?dBtF1y)hj48Hdq=vkKJiI43e8|<#OU@qI@@sJzwQu;3 z2`W6i*YlB>c!wFVm@*nw+AN;}FPc{jfodCn4j=fxh)d$6WSvQGBYR^CD@96P7__CkyvTU;#Lmfz+($6@T>n7Z9X@i*akRuz7HD692@KBDnyfOlY`;uFOP~%8oIy8U&jqsNV`f%6~ zgUv4a?)nA$Oy|B;BplKa@F^N<*D&63(QxLLlu4Og;GQN|?OXHZ=u$Kjaq|658TJ3v znXRRssi)4WJ2rT&05H zD2rS343x%9qPVG^^a+iVBG~5q=Hr3T%LX`iK+WfQo7m(0g5)Hh0f2ImQ!X3d?YDS% zi0i|?bRmo<%HEz`og+*?dp9Kf?BtxJ`81IM#XX?i@mv~)q&W-FMB3@^uoTwtgUrAm#HQ8^+N_xTl^!?jE zh`n}pc3DJcN-Tgw7e1KvQ(I5-{T}Ar@W=o& zbFY>E+h{h+GWJbj7s9@hQZ4h)Vnw+a=?_{0*csP-izs4!cM2Aqdzor`HBX2S6(SMq z5b@rnWnzarn>Vde@*^hAKnkA>#nEUEi%?40dr(lKDj6xd5)XvJneT#Hfrq>pp!IR~ zj8G_&%|jlgD#)xJ>u+FH;GiE7Pa(#=HMNu)*?yJ(n^vFlr=ArtbQ{~`O0031hIymRXXuZR^1K#CXFGh)v?WSG zS9!YpE;Il=jKocd?&_+Pa}F5>D0|Sa+ZhqwktwHo+AJ&_+cNbKGG(YQH!Rg_FZ}bu zPhNI9k@u1{-3#zY=&kS$ zFD$sKj^!P-XV2b;Gv)k}d8;}h%0p6L7V+Trgeqoc zI^h)Fj74belI+f}g2by?q7xSVI&CS+$zRsH1qIcYe_y=O4Kaj?&nnqMDlu1wTF33Z z>|e>}agws({N9eaj^3WFbp>Cy3HiVHlxf!>1AwsSBFTo=C7=!&4(^CUElCwmftb!& zHs5)coAv)VxGR7>tg76RFtF~r@5oa5<^!9(B8*kwlboUNBk_{o1EF1^n2gbpK4Q<3 zwAljwFvSq(i2Z}gHgJPkqPuA6W4P;N(2$aN)d*cL=(yiBbljE^qPG>UJlU`k77MqZ 
zqIi;?Nz#9LOV~aR)WfzP-x9J&z8Ke%e*0fdl6&%L$0N&#Un_HJ*Ny)WuAQj=Skt7F63w|NCP3-xJ zD=;ytpQ-}`m?_xppJ1@4wUPYj0kCnSq`$SNJtj5nYQ)wODPj2*YZDcLKx$m}Uz%Q@ z*wlNr&vIXu!SPjbdu^pqz^j(E%rgR|MWcI-oe=!5E=VYDpNsOpK4dLj%dhYv@AvY@ zhH^WVWi-han19$t4Gf)$l_>P6@l0^T*^@>psP9^?mBO)aqzjtoJIi}k9odMzNa=# z?5Bfm7-uJip(z6XM9py*A17C!%8Rzte`-%-B8OXk`#=>LH9IJn`S!mb*e-q4S8o4I zh6Iy7t|@7ML_RQzOFqYu6UXtiV+t#*%hV$VQ9%LvPS&XO?q6izKlvK5U6 zi*@Gjuve?5M1GiEd$p#yr1GLEd55tu&*J!R zEywH}IzT?;?W3@O^G40)l;Lv1T>jIMvmZ+|w)pst(NNi__sSk~S5016kmy}ScM{fDr zt4%-BoY9*{p1!I!zOYf?;K17@yu5fYkhG9}|9#t;oI3md&5n>*+gB;oIBk|y2@LU% z*$r`~{UcBT?{Ae?$-4wr&-aQU6z@6RdgHK>xC73U0dbE%z!4PjcEND}VZR~amCbw-N331hjDAYiEhR}7p0W5srPK;yah}b@RtpogI zOGdoBJ*}PAkm28CT3$8BO&! z5(bT(&35{Klyi?mz6E(&P~E7Ft)`7_LDTMvPshPqjPvPpkVZl(X5LJg#P1KD$MEq# zaH2y%hVNi^G@|FECxz11ejCI}_M6+6-X6J}C+zJWUyL&T(7vN5*KjE7W7^4r-y9+b zIQED=KhY#?jU2x?VX`BJm`tzBfv!)iuDl*iUG%SL^tPd9g9;jSx*LEce3BD`^gsNsyPH?+!04}XG;5TdTdC}1vh zx^|{iCBDI!p9<6G2F2ebKR*!?czZrFGqy`wR5mmqIaFE}Te?2{fzA#73+t^!FwrFm zxN}W?OlH^TQ#G>r3HM*)a@PP^)!WHNRCUdju*|XKy0iroW@uB58F+ti=Uh?0d7H*mBkvzsrDhBdAAwerV-!q zkk8NB?Mv-}IbwK|(4GwFic*l#Nej9EXX+t78wi4F&FD_b{dWb!8mb$CBYW8R?xZ7C z8n3pkYn<8W)O&eRZgvxd$a}q+OJ~D6zR|&#hv*qs3Z?wcyO!55LPisKd;X zQ?REoh;F%`0bH1x4GmF5Y`oXIk^bXw-2q;snCFH7tw?9Avx3S|q;;hEgY=774qb?78Dz?p|CD+>&&$tU5nI-|i@XYd31H^;cMTC+s>& z7xR9~TBGv#$*2N9U8*@dih0R@yFZDNCmRR5+Mk?O7^yoGj&Jf9%@i(LPrj%8)@a_Y zx!?2LWak)`wL|relf%Ww=ZORR^XMpE9+d;u6Q#KeTt)VJHA$KfSX7!V{+fak{#)jmM1#EkB!$Nh5;2#6@GlYd$mlM)qm;o zB_tml)xP`X&zbbU>G*68MA)uUYd8sX!3b5tI(%bqwGf0{%MyXB4>8K{mWtjpKmkP1 zT|IEaRc4YLWocq>H3Jr03c_Y52;sBBwFJO^<_&wRqjBiJ}jK z-e)aWSucDj%M^wOx`WWG^R~D5Yn}L z?Jhp~#Ul&oO@4L_bDFB_%2Uh`C5G4b-uVIiQzZoXcMgEAgk`LnLHNj zrA>5!X}amkSr{;FqnB~x2#3>;>}m_9Yp!Do5TnSL(ibUEG*nFgkh@Q^7}5w zNkU{4&hFHmyDHBgV%=YcsZG&s&95D~} zg8YurfBP>6f#wuvmitNQT8NUK6Wd6&G02D-*YlPq*IW?B$~_ea>{ zZhAsBguvaGN2uoWNJHl#PaTP-w3yQnPJOO{%a8W(D2AD}M8H>lu;w!^k|6>#9e|Nl z2_1&R#+aUuixHpLg1GED=k<8I_T65awm_9XrPzu%@#$%L_^|Gd>B4XAJ;AWQ9-P+A 
z*V1*^Z7NN!N!o9R&D?n51#}EGaWeylZTK{rHtv7fHu12l)=J?`aC7PO8;~=&5bNN2r|L>i%>zuj zcMg+KBbmB)*GduV5MjD8++QC^a+o*!wOm-k^;UupofIwQWx3$$;2~C!i|do1(a_^F z!H(VuzB_}ZlG~uRW8SDe@+2?;I)0hXT8cA?k-X6t4@i@zxhQ&TiL6{4O-SX9;I(90z|O8W8ug%fm%~<6 z=G@gx&5vA$wTC#TMzcgZ^Bx-t?72=>6jhc#%k`9RX}Pj6NRt_5kP?@SrTi6OqQ~t2 z-BXMcn&twXP$72@wlm&@ZF@?(Fm4oJtyel?Oj?oWR{(T=NwpmYz_#qd%J6a0e8{Io z!ih3Eu;U*^hb{mzT)>vX)vJh@;gNoU`{PtjJ98gtfDxT!J))!3C@(~kl zNKgiY^nh6ZLgJR7>TiSuN{ayS;TEWg>X_k?Zj{4S)h3{opLVOx;%?ss14N=Mqx{r6 zx%fwux(X={ixCi?{ea%R06C`w0H6`l`}s3i;FLJ%-kT%mG!ow*C9)=xioJgRD&eh9 zr7W!g?F4);E^4Enl#~j$J$M^%a+U>`U$TzPxsiJ#AwpT=$5M9b)p?~f4ZrqR8Ritc z@wmT-_)d#|u=dxxxrV-{N@%G3ClE=9se2zxxaO6*)J+<@01pGi@Ut}FU${;SLD0&K z4ljkks$@s}(mXBw-V7*tO*^gg!<|QI;$0;FC|2XmO;h9=^*wpdQv9r}fuW>$_fj%q zH=j}Mx&!@_@AJ~XumKtSpdsW&Cm#&0cIxkSAM&w8*ycane+5QWs~_ZE>uTap$M4$g+RaAh8l}`r zXCJy6|2VE5XIC2C=XM_nWt#`z{`MO`9Z1c(lT4%aNUx)-Pq$5tTeri@I0_svCXXRY zw?b=lm)YU{_w$5pHqn*J#pWfqE6<-4gzq+YZ)poIbI|xS$XN`WrhXAls6Mgcj%A5@ zYe-GKEJ&hIN(JCmjHUK3CAk^s{IOqcuiPs1rI_NgUwI<@Bz6K8eXpVM z&wICW7_ZK$ooB9%kCk}4_q?%M?HqSHZhK=A%TTvQ(@0~;acgaTqapB86mGyp`pq@H z^59br;yL1njzWiWpKP!QTi}94wr^o4EGaH-l*QA^5T$Nt8(8mgxm+E_^8v9TL)E*i zty`K^C7(FlXs%nf)x4)o{N`@WGOa*>N^xKju&M+k-^WDeV@P==r~iDtex!$JxxW1V zPYx8LcqWIj{$3p8X~>bX+tlymt`Elr>2!{!gMdP;Y<=fGock%`?5zxHb*lGcwre83 zR#8#Wp~d;X1Uk|nzo8T`dg0^sVUd5vrCR7?zj%F4UP`uOh38VExFZHBrmV}ZD|u*r0D5$b5v2TwlVL3GMW z7S&*J-52zE8SJjd;QD5=NyPLM87pWJiv{H6RYnH8`Ifa>cy_c1+I;-7T!bs)&95G_ z|CP+tYbL%~NXBjRXRJMV{}9eD2$qMJI*hYn0<@)r=PN`M=#65borv~XRFY???t7Ws z%~H95Og&4GDdFX&=Sl;s(Ub5IV>UhlnQ)G5DeWF0z}6=TYPN#AvOM{o#YR33_^Z9@ z93-ebk5eY+^cP%h)&lshGsc_bE?H?x;(kvSYg^9N^DXeYNcPE$Sn3y1>r4+sjbL5o zXxK3(ydAtxxEFAo6A~9tf;%sHMrI~!Y(=FS zOuIQri@UcU{U|5B%> zSc+gYM)JqN4RhrjOQtP1U@985I&k=Ahld_GdX0UyX4rDeNl$sx6kiK&ibS(t9Y`d$ z-!4qo`pNRvDRPepScbN&k|<{o5?KgD8742~)d1TA#IfHE7ZIR?G>1kbiLHeD%YUc+ zX*F60K$6`mb`!W-gATfT^|sviX)TqD;AJrPqB9RIx(=`S^jfN^ay4%v_O^VLMhX4! 
zA_rU}*|h+mS6iv^AHsN2E&0FX2MStGS=;n{ohj?($2hYzn6kdV{w5j6qlvh8XUD6+ za^*CnK0CAPqs-E6M!H~4%(@BSXW;QbKnIt)Q!$WkUHor@2+veL4*1a>r6P$6^MXP$Prq3=`!GYF%Qd3 zIu~2b&8CsG3NoSVhJ;1Z*Cw^ALOg6L9w*olqBv}lTtPPcV{nKaMum05noE7`VFZR-Y*7gLOQXWNzCM*GEFns$!kqfbr4MkwYh9X_%Q(6>{{m(8UKW{={N|ejJ!7Wx?GR!AS zUy}IHKFNKT(RosMCml|!@{=5@Ji^bz!>ix<89Yn)Zu0qyk&STV<{mcvQ9ifE!~5Pa zIxrtWT6)U*A6`SSM1>$9yQ%p8W^gg5+KSo1bK_4S|(aiz}bcyT1scM z_navOsQR11j{KtNN>o}ECZW>ETy4t#Z3;)Tm%Mtl%N*u@1oqCY=THX2%VHduz*2lZ5zE^fqe+V*+swGl;(rK_N-Cs#S*8h)h)Oq01J4g!Z@2`~-7 z$Gjx*#zn^m;ZqJ)B^%}>a6siI1#MjB`#u;WL6feY{_LSI_aFnVr$w4#ZgaifWX2sF z`m^DH9w)oQ3CJz0Kn9_@GM@<0v!rp<0AtokLIC*@y9rfxX`|DxSQBSOEpj~afP4?n z7^-Eifb-KU%`gAji`zSJ=o) zOkV7>)7yiGTw;*Yj)d)>?>}gaSzR|+>)wzO-d&_+>2hkqcxKTo$^~8hyzRWpH4OEG zkz%gr@J_2JZY6)A`7&{Lkj!R~<=9lp?M{L{Y_hua5^scJec5dZ2rLptjAyK2{I;pV z^D|KI&y9&hbgyC`ODBHV(?WR`lGje1;a=LaOMTkJ`!cT@qrvwX=+XY) zfSGh5s?c5b6R?~cP3KqYve6jdPbHkd!~7OntFG}$Zk{HNjK?xZIr+_LGSQ2yC=|x$AGm~&)LiAQBe;Al}?a%_(<)tM~$P88YYPNih+y5oRc7>wYO)^&aZAlhPrZ;zGPl|G;(`d=dAyTF&i$RqJkL+dHFT zZMntWqA}>+SHNlO)3X$srl#gF%kZh-%Q>2q@au#)={m=}%joL@XWtX$E|tboBaQea z!~6Z+SbxlOU18&Uiazlb8i#N1kh?8kbR|9wpE8EUj657+QVshP8*qe7kq7&~>0TNf zk{WV2iGFt$C%JOjl1NiR@-^U<2B#o88`(IzsN@J~^}jzs$rtsCn3RYBoZ2W- z8riCAi>~Y7Mm7(s%w0-}KEZHa2FEz*KF(KB4#d9YN8#BsvcU5hL*(dtzik0P+;A99 z^@`yCGH3L9o@@!78@Zvk6>O&k6bcJ5 zZKG)P0gBY=18B7YOn(FU!@hp5s&q7qX$q(kGh}V$_jHx?d+EG2pwzP$pz%^7`hgUD zWWGL!TQAUw3YF;tU&1xm-qAPucu_s@ym$o*+*!x<;KCY{RUVaN|A;ryL5|$?RoO_S zSnhMQBS*OOW|~Qfg;ZJA7^?10Ei|%!*nD`}{N>{8s`xv=u`t>XxdE+q2+1^Q!v39X zgPRP$vQjR7x`{BZmSbL{SgsKPV)6fV2#uYxO;WfUS$8l?LYREzOn=L2%OB~J;EOPu zs>fA>=1M>hu8d(oT77+dMfxTeQN|@NQP+C>Z$+O+`7W`w13Gfi z#97;fA+kUud%F|6H!~!$^_`L}W14WOG!$P%r~k5^DFH58n8)E``a|gWd>xXWD6s|n zW&-zEt}lg)oelvIzREL?d9pNQ5ce8L@zoVHb$FyrS!!Y4!GZ*64-GCA0V#P#JMyt( z3bXQFu(PE?S^6Sjc!Og1{oB>RQNd=G+kjjvwC0qCy8*z~ho>*pfRHS{NWTy{Xu-G1 zC2^jr;B|C8PoyW>LgBaTPIGp-jQWrFz@$-`8J?g<-(~xcvGzz5blq|zojKyG zyjmwt>P`^?vqKomNksX>d-{FiIxSbLoSsp&1~=o+jC9vPo$_B*tSyedzmtRsMIQyX 
z*maOS(UXaONvzOr$9H&06EN$e0mm$WBn*gr)X)H0s;oghiz?|ofam)8(XNm7pY~X9 zNG|(Y2ssjY0$oQiIk6eBN}N4By%P;tW*YkC-j|8h?6a>Ugfj!aVYwQ~LU8Za zS&cdR62Nnsx%)`%r}f>h-FyhPb!^7q^%Duv8J~8F6AtyU0EHWb-a+okd zmNhDbWH78Z&;{A>*Es=b(hA^x8<#rXIT@PyBEm>wzadl-YUwsb?X!O)8&0-a&ZA4s zEq)eX#XfaTUjvFmWgmdh@XEJxOtgv<8*L7#^koyEVhlQM8YYFMRTvoFV|ownPV5fJ zG|5uSH}I@Cq{e02%3v$sUJB0FL}4sM%EMuEdoh>iBFy8gt=9c7W>Y@o8fvIMUE~4#?1iZ z!3TzRA&*wg;K=Vo04f?ZNX40wka3jw6ie@u+!oQw{CFBCTw;9iRa-2VN zh#npTxvLIKssC{P6(N*E)j)%-`CT7+lr;U33z3mcOO*5hGf>if(_7C4ceaYKDX;Z| zW`{~+Twnf;0%-Vth}4CGIV|rcsNt=Ysy7gg8rfh$#k`U>@p+de)^3|)hMben8o<<- z%B~qChu~(D}pi0T2l|MKmV9Q<6u|i*Z)jqA%RrI}c3ck1O*b~!d zSSSp5-y7NNaIWzmK9Te>?`K!Nbz`(p>-Y&)STUKA`S#Xh)gV|q9II|PZ3t93a``5X zs$f0VCw#SF%Ug&yBAyTR{9-wEes<{mPt9z=>a6~}m(rDOS?9oZ z)L)a-PY2uf96e>JYMc<5*|hw451J|1ZRiX$h7^qW{OH;G`((MfbGj~1B{_C)qKS^) zBNkRZ1xj5!x*tV?wJOQlWD2=np|tToGd$Zf1tB&s`B_y3g4dydpGR;Ykztf2J}FnB z27ZD0>mSAx+lhR%Fieu)G!^~|G)BJp*UxBLhn&LL|}X%yyh zLPy%R&Qo~$M6Twp7r#oP*lpXTf)KMysgmP!WX)sn^76Il(H_XUeNW9!*>+2-;23lWSx z(uzb_Og34qH`n5`7of+q4*QIdZgi2elfu=Av=e(0bf4E^oNK|sn#J{I6S-cpV}uYq z&=$L7A8IAFXflyQl){rENPlG&UDal7tlANl0XR=i^t1Fg?1BAlQNUq|Df(CiKCk6hXR%!YxLR0$ zxOcoEliRD#>ErAU(%Q-lF0>^XmSV6CUMfcnb7PO}%8W8HDxw9A50IQDk4Ei!vKnRV zy3gs;@Q3l5zS8blcWu}WXKu}Pn!<0cAZ`JE>V{(3sBTNcRor7z!eGcOpTjWfXJ3L$ z*^v*svDae-+A1!^y8-oe7q72JZ73c|=4mg!(Jr5gG)sa|nZLWhf2I}fcGJ;PbLvaJdtTcj%I2B4 z>TN&_uhaBfvu|K;wuuz^UL6YIs{HJfNq=n>QYDHuZe9UU`T1ILSI8F8N`Y;(C|Mzm z)2q)ra@~k`J9;sC1;)OcaU6hpmq_=oz{MFJzSqMUZ4ae(h(Kg)Po7-Cv6Jh2e9m$0 z43au*2E2M|aa&?VdV8s%E+o<*uTgrT$RguP%h#ewN6W<)fofl5+WL<0@3sJ?q149S zTlS_c7+USWI`j6gQ?D?H-&_R#@vUUhn3yF6L2Po%^#|_L6OR%INC)Lg<}s(;lI@YS z;W&tNST|2~Gs&U+6@FmW6WbiZPr&;jH7%rf^tb$1glIX9vIso8X_f9eelr(wHrVAGL#r9LRB=SwB!CT9_wfaI!0XBF{+Fpy$7J%AGO1Z=-;u&sbZq& z*Ry^ZhtvD>vpEca^?XU5;nliCmsEr#+1&)?c4nAv5Cq@?Rkt3NQIAd{%T3R z!v_s|;Vr{3JII>|hT0&zoP0S80iWR2v7oG@Of(G6M|jcnvaG-EKZ~X+31xOR^W;pw zF>Q{CqIQbBD*Dl$SjW+P5Ht0jg;rfd*+y=G%yP&t-#?ZAlDITqtZ!%wBt(>32REDQ znC@c7geguT4H~^q71PS*8cU)nBZdfA3x$)=w%!T;9c7&|= 
zcJtBGp&cno6ZBn?zb=DixmQFpEQb#WoJ4uXUFm!gM;v8xk(vC(>WfFfK(51RI_B9% zetzL>G8yX%vPRgvbvJ?yuzeS>$yh*K^{O;mS}Lz`sC2I)hhZIVl>WLrhuZt&f?lc5-0clI+aQ=sJ}9@R%bmyR zzzMVGS5%SX^MIh9p!>iUyWdkNc}KJl}m|}2HfQsxHt3}-niL@WFr-O zjXWdSnRCQMC<5P+nFX%MOdqbAm1HE<=+~jhD9bKte(I}$G|c#9Tb4aW!NEdW9ed4MqD={~vxoA_o_a;>F-Rx1<#G8Sb5>Rs;d%6|Gn-B$h%l=Nl}~b! zl5RS+_2w0(uB?NgqtUGpdd{6X`ZL8J&PAI7DkC&Vv)f+F0@Mogv(F79y_c8D@ZNeapT8*K_|EYezk(563DV;yLI4dqgAMhDX- z&6Op;%4Sh{0?7B{5?ly$UAQ9-*$h4>)pC?(Vfka+iu+Y_=+djZCa^UA<&c~*>1_N| zs6@D%cUWfndB&3x-FZGPcGy!}?w9XL$jXf;8SdIe6h6{L&GA`RrhtnPL%pqbc)9LO z{=ja^2oQH zRo1L@w~d~NP(dqybZay#Vh)2R#C-F;xs8A%&X7}nsd_x3%Y`QkbPAV@OYsAc3xTZT z&)3E?3-VR&Fw&x$B*Oq>MxU^*U1I(_^JoAxReH0kxwl~614Nhf9!u`bK`+sp$x5ga zqN1W0P^rt^$*B@G-YG8V!J@eLT}_zb8$sU(9=oMR_33q8ci<&8oH#{qTkEpvnBPqJt;4NyED%$sw)~uVk+;1Oue(kOFr5j z4G4;PpSYdz@JUhHS|~7X3R#NTP7a<&;Z5Co<0t2dPnbFlypP^#Pz=L46HZQxxU5Ko zw2%7Z9d-BoN|;OcQQSGh^1D+CYjFd9J^%OXXGaGPh+)iFq`d`x^WHwLJ3KMfVva0W zO6Rxz+Tb4k7`&*jfn>3!bBL=Sni@*zM(2X`r`VNnDeqa+%d~l|IuGQP)TOs{UUl6e zFUOn}_GHof&&-9G$6+PVAWl~^_f{HG&!GRTEiu?Co}Yg391TGf+-@%%jcOIP zE}_8LKFi_(DdsPXyXu?5^#jdpN?%>thxoz8jL*IbV*c<#gdzl|hT z-1%jKKiypK>0xwHu;h6c4eTwJ*VmT6w}s@zu|7U%0N&}*O{n8B7~;ZG=DA3$D|Qmx zl-PFe^-2#SRV90VhOE^Y41+6HSwQ4VsKLRcIfk!#Xm;dSRYoKV3Wkmmc%xy;7+A?N*gq=;Kc$Vzv?N%1@qW{O_ zuM$>tJcPGiR$m^~{`M6xydG&**{5^D1IAbe zmq@KJt$AX)NFr%U?XZ7RQMkSy;EjUzoydY3wS{6bHHmQ{vz{_pnJXc)elC7L?hDoW zO|uKPcjN{^!Mdx{05ql-rCw2!n!M^#>f=dPM2ct)PTL_Jpw@(=b!&~w6JCKDYp=HN z`}%M~xVpozWfnV6JjR9JD6~C|=R3d2IwfaZq_Ny44jajRmi8V0ql=p6wio8bCj8rL zQd7BPJnGDHN?uAu6G&;3g-r8)xp z^kH{_jJ|rY9=`cfDI+WUwjSDhv?w{@22xz@ZPg(>B&eFpV`k<{6vb8OCOs={|*>of2nHejTB3I{;UrG>%-KUi1A~aTalvW zlGuu#8=Hjlprxam%b$J!x^F&y60IM2W5R44{QWR}3M?;>LD}{Nt|}0Qm?`7($|d5G zBx#Cot)t&lU7={?I!%gGR-rr3=a~S23!j=?P*^y>=}T_aHD?kqjjF$slIQrav3~#Q z*DhFXfjLC|vp1OyWl{Qa8NxThkw5lKa`;9*Ya&kV*@(W9f0}!RsN{`9255 z*d}Ek_iZ&qpD(aS+CYRkwIjk>J1~Vu4q+MFM;zC8TnOv?2C-UYC+4mW&K*nH9#$=$ z7FF?6p+G0znE{XF&NY^gE~J zkQyDgSt|@VX^N=){A4f5R@?k=-=40*c5m&9A@?)e+(suRJFM+~_pI#M!VwT+rKpQs 
z!Ki)A!o(E82qn%f=Vld3DM2zLdGOL@lgM}p4<`X{;D1e07H7Qdf!xVtn(Wj{fqwEt z-?Upz^QA^u0a;#rYOKi))3+3>opvn$(bq0N{cw z7sY>VReIpO8VH7(XAjUE#lT4^?Ct_NouG{DXo;eV?EK(lOT zFn+4K5)~Lr%CBXX;XFUwvm(J}M+!F&=GEK8X4&&MAATj0(mu%TaQd2&M~C!=^u$rF zFgg)Ar>_@_CtBHS&h)M2u>TiCScUqq-zIEm{bPSaZrA^~`mOM==B#c2+AoiVA6%w|vjr zqpZ(_^%^;v$wx=h6w&470nB&wK(;q}j3+EVr|z}24rrCj7L)Sk1b(sg*R$bYETDf@ z2mmArzw@*L9Yu^C%YN0|g)EhpBFEyTF!L0o&vmNcZ=^gn^2FESe2FBu(6L2BN3-B{ zsjFp!hiQ;`#@e7ljMy@(q^akSdv5IvK729BSLt?q*GnY@)J6q-9EfATH@+)=pY!a( zT_=+a(d2+u_g;Q8=eu>=t#9vhl}e~!ULlp-n1agm?oU>bN-r5^q5A-S>UuLttBxdbqfT)aj7;YaHD=D9> z!@*EwZbP$khd$t8{faMWRk98ZXF>ei>(kv!!*<=zkS#(u#dYQAg9b+hLTV6OhC=1nhMjka{jzSn!#7T|(s&`( z-!1(TEFP}*y$n5GqJud7o;=zN>L9Z(y?ZzXa>+A$*sb=haZkQoICrh9t3H=oR3@<( z;te>}2L5Nf^{}iN5$HPAu?GM7!gu_C?G6AW<&|jlWnIbRY0)1VA$JS*_C}ZdcBE`{ zbbF)G+9Ca7(E5z(&vYYQE@|YmMuh}>&5mtZ_n-_L68q_V2hoa{%Dau1>B5zKW4x--D-}JR?dYcYW8-_fz zMQ6Q~j|n%5HrLyQmH0w&THMopPGY+QgBSmLE{m`l-eDJZjH8%7xOZ;lu8_uhWmry7 zbI9+V#_-FAtX;3u>`^U{u8YNRmOaXOnEriB9|5~P%xQ|1b+r$}?HV<;86`>)nnJOE zd;3c$v%rq~>Jk!Qpnq6`jK~LzKjHY@VMup%UJ@?xqQFVtie+#(uXDgFbF0XUJEYg1 zS^Cd$hqEF~%#U|`u~G;baXS8-)Og`(Y2>6j|7(XBA-MsQw8H^X9TMr-BgN zsLP9aSs4mfeRQgew%f=(#7l@KUkxe5vuWU|)HtV>YE72};fXn~VG8YZBNon9 zud2X}1RM&D*dbk$*V(=CGuZ1*hppKMKT@LOp z#2|*V=@lbItd&>HbMkFEQFPC`Emg6uz{4JwD^4G!F1?O5LDk0_HMy@UvPq=C27tYL zi@zW}fhG{`H|9Ss#r~ta;eWAUY|5pAuS1K(&*Q&tfv=vb|sCXeMR36}6(PE`?A znkUjaQsoC8;ySFo2)7r}GZSKep5r;Jm<1P!Hcg?mc>t|YRKP0jYPcWbRW7t&tr!(s ze9>LXXn2bBvuqsaed*NKKF9t@Ugd${u|}Z*%@V9+((5(VXYHTPm0_xigeujo!e6-j z9ZQ^kSAVk#O=V~DPyWTGsi;m_z}n|fq8idC{Wm3cWA$InnS6xBmd9t?TTEkMCUh4A zZ%Rh03()^yKnUuimR_Nc;7=OB`Qcs#9E@;CA(W;JKVAO>Ol-$&&i5C<8`!Pj*vFg( zOD&4Ye7n-wdiT#aa@A5&wlDo`VjiADPy7`@Cc5I8V!>C z7U__5Z@=$gE?lC-nL%rth#HQ@Io|+3?uyQ{UtAEy%VdK4BaU4)ylRJIM=w7cQ)mt? 
zGOqkXnn>_axg2g*A-9hcPhedjkZm8DhDa(@ChaC+KM;O00YOL@R7My+#~$jT<_ao_ z4I;LObU9IA9xVfM-kg*#?~i^#igL@#eY(k^VZ%RnxahzCdPqY7<`^dyg!;35Q-yM= zvk}e=oJj>kEMGHcoa~4gXD*YQ(je5PxOW5IspM692V5ez*6%0{nNJy@P>zqStsSI) z9`CqPE0_3jouwhD4kd%P{9qFjRoVjaFj$hCT zgSq>sJ5b=|4Kv}~f0tADO6&~|hD)in7AKAMzMRe0QvN7>BEyee?HMVK!OfBE*#mU3 zuD9@Pa&IC1ga?fDEFx)~z8Kwb!_b?|VVhC!ER6N@; z<(IyzOsdn0O>-E(ByrR3E@4S4xVzGZF!4;KP!$0bVt9nYkUU;CkRC4;&oM2jkzRZy zU>}ccKL2@A!2AH~Om*CbYzxLC*_ZQ2V8H!Mzs6N1O5XM`2KEVx-EWe(w|2rN<*io9 z4Exz2`E3@)r$?t4ZaHjFh!-vyg8pH`C>snH9b=vL zO=tGe8)^_he}RkHC6jU?G-Q&U&g|!%FyDTBP-y1Sc6w7D7Ul(v1HGp>1fwmE1e=`I zT8(BeXv>=DL|ku&1(Wt93>@N@!bi1ilrzRG-|jd9YY<}2H#;I$c_(Q^xR4;p6oH%9 z#cwNpZJu@KpN&)xrjYks~%TTS&J~aq5XLx}iSfq4EIe>k{mA zsBvjjtYx<8vP`v2fFl_ur3j-y0A!FRJ}n`7&Bikf%K7!fxp5%N?9YOPpLn5j{}>Bt zrMI>E{K)v5NM$gIPgiPCa&YE!s@>Mpi?litm2|AwlG!ujLY9yb`Vq-C)v@H4w264q zs-c&{Xl_n9be;=*DUJm)xw^de1(o#wYRrpoQNOS11SB)T|3$UT$Q6I_QWjew;r=5G zV;L2!^9pHNMpHCvyOz+>L$1VcA%GiLaCBu7ZK0WbU@h0Vd_(N}W@lG9Bpb!${Oj2C zN(4%x)1|d0*xu3b?Aw~} z?GQlce|~)7SqEXfQA11_wS?gT|l#QKqwC| z&U2~Oi5pVCCT~X^E|@7@6R_$$uW#p#j#8#SKitIO!Om+vWGP_*)eICAl%a`sB_Ay9OeMa>EUUCoy zZ2X~6dyu;($#b#&e9e+f*R}0|u=9KRis-&0nr7LLO`($%Z%WRjg22z@e?Lb2l^?^t}yX}>EG2e=Q0m`t~Vt6i6E#xC$E+jx z+X%yB&$`26Mr%yAy;Gsx9txzCVLv3>I4n!v58NIH4EOqQn0dT2pgf77>IuLZ*ccdj7#=fh^y}wFl)~S8dOfH~QEsLm zKs4bbC6Bja#K$jgr&2EW0tQk=uIUs=pK*T2c_2U@xx2S=1&9tbktBmj7(bnzYieGz zJOU0Js=!*&riSua)|2QB{WZ|1Tmk8@xt^gd5+Z`H2))ll6U`z6Av`@7TLLq`lmAvx z+cJ&T3V3;We<+dkOv~ZwX=jdHo37F|L%*)7bVZJS>O2Zzt^cxZoVK}DMF28*u(1qj zED5NrYN5)NR1a|}{Sgq0k3F@3Per~qU4R?tf`3@n5UqkFQn5C@i_h!PSb=W8seu~Z z3K+lK4@~bL+d@8!d!7V}WHE!|m0i(~>8~onumk5g@-!vexN5lxI>>_*p@zM$80TB} z+0E(Ud`B1GK2vB19Ux2W6h-j25OK&55DyDv4TkiE^sAkyz?LMT#@W1+)wE1TAgROU zp*V}K@Nm!q$TZX!gh-zBt);Q0nXTFsjGY5GnwVw&Kb6C50}ncOzXKA)zA zmaGGtqdQ!(l?Rl)@toL=-tZmuw-28#ucZrO4LmyPa)Nfg=S!2f<{1mJe>&X?4r5&{ z@cKni^4P;%vQm!!hsQp~ioISkhJak9HHqP<;Z}N`M_;AUR&&mN6{FtZGGr9Ws?s3N zl~^%LWcFuXa{0(rCC^JZvFee9Ne!%Z^ypBf{eB|JDnI%c(w8J>@zPo))X1EV+K7{h 
zxkP>~pGK0{kr)^lv^B&^XUx=rhmv1r#BfpmV8JL>Ef!lmf~ZKNnh4Lh-SYu+A)cV>1+tw_!J7yil#xACyx3G(`s z+$o83be9MiFOIZ-a?o1;?PQ1nqA%g5DmpAww~%YBq6;gcr~U4?e(k~h@9&&((}9E- zi+ot@9+zY>d#|W9@Xc)Tqn)H%;+gkSSZT&EauQ%z zH32&mku<@jci=r_gR)8HUQPz-axsZM&S@N4GQ-Hel3i*EFu&5U$H$QAHi|1201|`{ z4M7JfsoQ=g5wFr~z4q9XPKG92a*x$z(}*`ak}`j4C|uIc z(xH%J>12q~#BbO@1>%|5S4aJk)|ko({h+n?G~t}@y|gwM0HhDiqh9z}4zo6Q!HwBR{PCzDNEAkatGR2`uv^p!c2jPL_G?3+z?^ZJ=7AD{|7Nn9?g-y>@0>&F~s zL;4wN^Bp+_2VnbZIODX4Gn}cPi=I-q3y8n0h;30D+3b_W5=c>LCZiWA?*1Naz=)X# zq{|B79E*pHJXDSKqRSclYob{_s88Q)<|0OpD`SUJe;{}4` zSJk`dg49^>x}VtoWxq*RYF-KZ)>k;u?ce`5I1?@%|F%&PkdY2&wIMvW(w;AQ_4^l(|H!G>St!8CT7te_dXXR;c5 zD&?zdv=laj8r?j(0`xGPfzP)@a$&De5)wh5yOGh30|Vin^4+UI*Qn=Is8Utgc^A;6 z_Z{nPX!CiJx_@n5l5KFENbYJe zcrmuxLW`pIMvww=M4yn({X<8FkiyVMo2`2^NG$#cV5 zYs=AWYiv-%+NM*?>G3ec$G(8(y)onwf2rDyzrnW!)7#y)J6k`;q>;t_d?};^{Ddth zbuxQX*aY^{=lo{!NybZ~<#}yu()PjAU)palGhugr^r6WY(&o1exs7qY@`n73H8&jX zBqaLZ5#YI#<+-^p;vM=O86IB9M3$iS*~Na2zky%M{7eqSA5HI-M7`vzcpm2y8Q1q{ z0Uz>yJlGD^Vx>Gs$bfRY8(uftn{n60RGW;;8=GF!*KIaQALyuLC;W(~Kc%o&;awts zMwv#%AGUwu!H6GCH#&SMaNxF8U6hq8b$r%#-Q$VrK3Lm1q77ydR-yy@U5aR zcs;(B=Jw#VTI73$W!TNqoSh2SX+FmX5kJOc!TTJT=<_d6_~CW)`ee7;cIfHe@9ZmL z2CgKRBSzXI%kC&3t)kB;{x4;aDAd>;#{d0z%i4VseM1?%D<8-leyKj`UF0~f5chC5 z#nNaKg^_V9AN;BAyiGbqAudt@DztwGE6~{83XwJ*9l+OWbo{tC?oD{EOdiWRf$DrGKyMjaK zC>GValx@4FR+zpGe}iaB<=>B*c3rTRtm^e$ML0~Nzb`s^Nq8O%21qEEN*VzOO>OOS zOywwTJtE(f|H!>f!k)>amJsyJHBb3+Fkk;iAo}T1eXx4UoWpEr6fh|B)8AMJSW2ct zdKYyd!qcf2(nRwR{%B;|Xe#j=5MQ+jBK+Lo*4F;iWFsapR0WFYmzAhCHJ0Qk)1hBh zMXFHMbFh^INBxD~)HByh>6|@}0(aC#YogpB&2!iR86V-#*ge!XQ{Y} zGfo-v5ckV?$o%wTu|SWYro@{jvyt`bHxE-7$mB?JS7giX6vWKj#cn&=&X$Qf7iBKcaeLN znCEIJaP4Jai$*J=Gv<99EjUOo=}vD=R`ni4e5vuw7C2vpH%X3SNq+UQ;E1Kv2^s2* z!p9uMA)qt}NtHyya`(vVDbeaNRT)b#bMoQGVfFZ&LMyI8#iFznlG(>9jiMV*-bk#? zL7L#^Lk{F*q@TO#B34BS9iI;9Ux_$cp5ZUU_oyx}U}?#+vJu0f? 
z>G&p=y^xK}TSs1Dr1v&EqBXTok>DGi1pKLT5x-fO`JKgn^uwzD2=?x~?bb{oogRtY zdW+%~{Z+}G&|0=HCR_bcZ)kIL*<4Va47VCk^Q={ZAK1NAD6J&`{r=U2J~FyFQE6xA z;O3?h?zrrPuLjIK+{Ptn#*RNEw15tK=SMXT;Apmz$O`?;c=XLozCbVZit+p;#0kDX z--%6$5opXex8_Ed{LNtg(meE>nTb!7!fJ@g=JfnPe*^My00D@Oz>o-hR15A*&OC-d zS}Joq@)nm2r&ZVi`1+76>abr6&wXQZV|2z=J&5Wg`VBy`Ma^=MtPfqT;vo_xkVaEN zl1jcaoeRk2CYRR0mnECB?D6nWTkNIa5Upr6pd(82JRtUAjwMrjd;(orS-K9F* z9egX4W|?2>yCiY)N{bz?o^_NV_;bLi6s^a!PrI+BK!VYQv!6~ThkY{2op?Oxh_8do zREG)U_$h00fK{=iVoU3YLAtUxSw910GjS_nB;@-*Peu<)uBiv!nwxL=Geo=A4Mgsz zi-KSwqP!jiahhW}8@9^qNmT}r zl!Xs;MSHEu2N5`g^k%Itnmq|s8YYOAR~b%yoh$c;n84T>3diZ@XHd5!RT;$lAxAFr zO3Lp8IHr$4kD0nj!~Ie&%vy;9NNVV=81Sc*#r}gLpvz_1tqlF_(vnm@ZpLcCi5*}H zpkrK!pF7Ir#$16FR)#gW&B@0!cAbo5%gP%CDgx?>^rs54N7nmXcm#%QvdPDiUln;d zdJPzcyn@E!CU$7~vxBVn{|QZgw=}T0l{`pK!HvRQp5{v+W`#!lvQ`u$WriAM8?|IX z@Z7i-YkG{dalA;iY{ve3)^u1no6 zY5sSMg1tV1o~NjJZ7Jb?xG99|#KMgDz)4w|liZxmj~xyLTt15d7tgOTutIJA>iWM> zZ$1O#&a#o#!fq;xjID% zlAoREf$Ku<)}5u;z39(P!B&v*q48&0B2CB7HbR&(19(<%)5tcf^@KTRYr(97Zo^Kz z8daMS_+^VG>%Une;ssD+@L?H<`a$sAc{MPDJRlxB)3AJ=75@X!?{+9=SR7gD5wFoZ zuTgez!NyT~9&>5afz`I?e^jCT2Q}iBpA+djRx8&C7{ZoOZ=jp^7lYdo1*kuwMnit{ z$gc*L|Mv}ObINR!+ABQHE0w&JA8ZIGWa+{4N*QAR9pSps%BDhy;sNcouzH>?RO$p> z?T!f7Z|00hO@s$5Xw?5v_d~Rd%KSTiIU_$l{o9opq0T^G=yIU1rpXx^6sx;duZtYz zPmeb9vjx=gCS(SZziq0*wJfv1#z91;?eHPLPjc+g7g89;y^5O$?@rZz{T z(32OHTs$(+(FtSIrvqrUl#y zuZOURPdk!G>JrjEYgxKBIE%J?RO!rIhRBt03eau1d`wd_6 zI&|(&R9fIz?(f`dCP%&tWV^sF33OYw)@+0!D<0XoJF6Lgz!dwdA<4 z(5JtlK@j70T_cyO1|s|o0`Q|tkA-$DoehPF6Ol5>!4E3&nCaD4#7Eb%;J{Z`4%tsj zRhVH&bG-*=li<&?xYswOF%(&4)J?wlD(dhJ&N84po0Koe(8_3o^*{NIz=VNW9(&nN zY9vwR1t?x9e!ur8rc^!%v6R%66?MH#hXtn<_}ilA|I&V>Tz@jVeqR!ImQ}+0x$STjD_dR+%PcW?YM_Npa z{le{nUQ;_N^33V>0yHnz(&!GS*zH<>F>3~!GF6he5gkm36J5=p4lW5pH_74@=3HxQ zP58f65%Ee%*k|hcl^DGin!Q~C=4NM#Afao7>mwbcJpF%OL)@3|J@7kdia&yGvl{Tl z3w~1%HPXSZwiLMUM-mYs=fv5K&oW@{U8Vu#x&ST*5&_L^HA2*Tww@X| zH1&jVqMn6-oz!3`HuD#uTI7!;v2MgWha;wphCSs(Xn2F^SU|`iv@3POL`CC-eo7_N 
z$pr%kvr_E2+e}uV?epJvKf?E@x2MRK8GhdM^`a$w9&$AW$aYI`&e-)&V6v4fHaFYU z$fBr?t_YOgZE}LJ2AFP-eqJKh76b^w`MbH(gHRn?VU^=~%#tT7nK}O=I40c31xwiC z$MLU9Y7KtQO`kjw&c;`}Uy77Ut|JNm)F0BHw&b~GRF=F(GBEMjV>HsUZm%3jHF0~u z;D3aL=wNK=)f0@gYKjvxxF@gML$%vP=u zucin+l@9ng|mF{NP>U&n^pSdNG)^d z{I5ar>%Of4@*t) z6}Rw{E(EF^1QyB1e|~y`?y6dx_&Xt2XXLs;cxmoOm-DrFmqM{@(zR_FHCL5=qfeia zm8M4Z25Z81$pxsNs8J${*zoHlCb^{w!CH4gxv3M|5_i>W?pdwn)(4Iiquztg)O81oFG;H?x;*4!5;tvaqE$DjRTjh)Kab_`mYCe$erxNn#~p1yFj>)Y77kj z^6q*DOlxivkTr#t$U}kO&U`wJ%0n~%4CpV4R#UrD;uo93>t^$%TrdV{G5`n7LC9on z7C|rzc2>XM_o6R3hc&1XlbNzzyRcyh%D)~i<>60wrL%unz{QwVA~d^5jdJs6QQz^P z-PvdNrsNnR^`79|TOc2?M`iYp&07TkM=1=r7?cQnBHUNv>Ou#(6_7dWdWv$uj!D-& z7b%f*M%Vo1d`(HtQ+Zul3C0m@wAh z1u1{COZ)(P1C1)rIG^HAz?PB6lS^=lvZlO<#cBtff$c0JxqzohU&%B;hQGq+igUD) z$QOv{EX;F@X1H<9jhI8Bz$6NMFFM#i@ zUdz*jq3tHyu`wH7P}eq5uuj58Mi>J0yYY0;vew@VTy4Lgq)|d*G3y`4Pj3Ey9Nf;& zDwy`fVg>x(aj1klK#z!h_xRc2qAU0huP8ba_Ln1$rMk2>38$5*YCGPqEdH8W=JUrW ztk3&%+OvD};h!(dc}n0R;`NbF`LW5JA+m1~CC1J8YX#Wwi}yHxrLSyGIrE^4O)R*x z_@uKsCYt%&db({a6~6yg&~_2<BZXeXH=CfS zPi703Z6BaK5~YD512O3tcQ1FFi+b(oRAYMeE}2)Oz&p+xGGK~%ei=Hzx;^K8eZ9Q^ z7Aei_?bkoUa=ugC0M1{lj_cB^C%=VqZLL~Ul!Yq8DysZ6C2Cehk^l`3%Lzqy-1%JLEFAzdj z(DNh{7FqgXc??eQ+9U9L13WfO>ME$mIBajXo3GKHh* zm;Ho=no)lOqB<;O|A$jbhnrXMc+K;JX6NAf>!~-wZ0w}W|G#l@MpYH1Crg@yx4-Vz z?e*nwwa_eov=C~jN()xB-rMQD8!S+cyFDPEf$2qbA8ZQDOMN<{^?B~j@US27Q1G_~ z{f<5jT%HXB50CL)Z2tqcEKEe_u^5Bl_L#jHkS8PA&WQuuD^xFpj)3aQ8;%bLQs_q; zuiu!||wN=zdNuP_`p?3--a|n__8M(@f=?AB(sw@0oXPK`^ydiEtxG!rRdm)r6gW06o zpF0|}EZ8w$&X?5qs5<3hqJ)QNu?!^)aiO5x$ZE8=SDkr%(L{6c`|u)UD<}B?5?u zb7FB5cF2bZLD>fC(*^SQ@a_)c4&LS=Zr5T>nUa%A_#7oMXl`??QS754s>sorye{HP z%x4E{Jt+Eyt_jfchKtQGi6Cn1NqiDr2gH?sGlLjR?{Gq$(IN0T^~MbO+JCW}~~ zUjg`ukosy%;xnaQzyTZ-i#bx1&#+sKq<4m*csAi({D<+TFKB>Yp=sosxJl-)VJ1Pn z(n_WD&UTV4UlZAn@MJ1(6FSZU4XT&>Z-t*(;vvuk7Z;eH$c-!?K_V9@`KgT_P?r;Z zUWC;}rC*)J8)BQ-!Q1mR=;XnY(<@bi1}7vD`vUHg?Xepc2ZzD?LajTY6CDX8}%Z|&9+_<{Ya}i(7005PGnKXG&61YK_E=T2L>4!x{*W~xrAR)q)Zx> 
zu)KGXjG7yA;*M#YW;gy@q5o03UC~lh%xhr5J*E<1J7$)KfEpGwC_zfmh=s?Y(*z=s zX6~Ba*Y0)vT_L@%9Y0A&G~Hx~-}5FWgV8nhH%iI=s8>bUZB$e{-17BN=)(#GqBzY$ z7V*0cn~Mk36|~xv&MO3dKumR$wQq%$<6j6&*)XH$<>cLKfYZw!AsTW#>plx~1BZvH zgg%C=E*sQ)oj)1f9;CqSPFK_LM4CgJq@&^Y> z7{?LEW)aj>&7rk}9s@HcqH`@xmhelrwmK8sSTg!}bKyf?laD{+Sfqc$g3qaq)(*6N z>a9flp4S@}?AcUN125x5eX5$nCil&Qs*~ey)il~*A+sa{{8EA7*q%OG=1hRAn=Sg| zm=Nml^avb{&JoWxn!}>d@mZgUYtuc$4#^fkGsSiI8+z>?U#J|rQ3SkDB8k7J8y6|Q z8cP`OebD#_qBo^GP@EIUgf!1`DwldoubMHDEi}Ln2_Y?~u)pl@(lUv?q_y&;IPi+E z_l>cSt+mdD&VEF7o6J<`duTIVOLT7ayOpQD64gN3uOmWeq5A3eZ9tAyF?7)w5 zMwPz^AsFvO6R^SJCd{Vww_mt)gTcW5SH&Hs@HRoHDl28)#|sCcd-R z|NPy6Vt`FJpIr_cv=Nx%-GS1mGnrQ|-_M}dl3DjW#jaU6W0vzS{Vif`ZLQk-jq3A^ zHd#)eJt2{Z>`>cVb^DuZ&N!~ z(#Ynn*B6m*Gal?@OR>7&C-B`4wXUe$7gNanH0o*Bi}~IDv#I4Rag&W9j8&%;Qv}f4~_m$ya+9tRR-#s4kjk~d)w;`wizulhkZpf?f~nMgsllMfm~c( zs-%1@um);WGX64z?)e0SoqgAF@1zhEH+_rGRAZI3hTHnB7c15OCq(Sp#Tob5A4SM* zC~*9C4~f%w|8|F3MXcY^MvsI^_BD0XTXykG^xG0;74y>$A)H2~-jBoC;xg5YZQvzJ zVBFo^T^+c5nt;rXGFsm8ZfjhPg<;qpE5#>sQ6Mio5IooklvmuS?q6o_M0dJ1JEb0% zSC6FWjWB)f8G<&}V>#rht9c*DD14#b71k;Kh@d^^iMC<*X>>vTa_|4^NU&e0cE43Y<>77fzU|J-u#L;{1AW|a=r&v%Flq=^6F=UI#&zdaZfQYRwfqdM1BRjgmfjG;NN%bkl_FfvVK?b$LklDU*rJI_iC`4a)NkFFozel@~tN!VI|qYu5A)J_Gz; zH`UO;ck!M$v8@*Or+I;{9Dj=Sz-eIiW+gHj(1rSaEEB36Ip2=rHL{2HL?gk#jZMfQ z03Tl#Y_Ily$lzh_umPsy+rPQ+#yM%#@iW~O&_xcc_q9sIliM0Q*OHsIi z=rCb49=jfgu{N#QNJ1UOY@?bcHB|P*BXIL zW{tqQ45X3dssJ2Rcg2Il8M6^)J~sVqXD2AjK#KmSpcV#yd`xvjW#QEdM_=sL|2H%SmkZKhy`l>QSc>=;&87#5@&OgrXN zuMBq7Abkfg3Q(dCn|PXo4qv&uH1wwcZEdBQz%u~I_?j}sb2vopy3asnQ1UM26ea3V zS|iu`rk@?&!r%}-O_ZvW#Zwq%VBWZ*%Oh>ioy`TCtK5xtyY273?~b4%A?Lmrw!yKh zPHYpHx!Ur{3O(cH@K@LU6w{&ZV^TB2a2s+{9Z!2)>$Zq8EMy*f3HhJadBEhEfhM4g zRA6Hx_!m&Q^GpV-3%jO}`ST|8lf)3ME8=F06Tu?;F64H;?uZSbHlA;EM9S|gGh)#% z6(Ix3UwWa5?p=r zRjdY}gP!G+YhB+AR{%6MIHccm83hDLyE-sHJQsWh0}9S0An#D(rk#R8a9*(%5eo%bfh0TFwB93louzBU)o-xxmFO_{DWlgHrK z*VAux3(d(f%sZ$=0W=d5!AS21;`1ccCBrd#tqTl$cZ%fA3^?6@?kRFCN(?_k=m5GL zc}&wiU*Yd5@|@VR6jXrM`GD+6Y;Xnd$qNC=68yp^Eyg 
z?aTI*TrGG1nq~~A$v!(hTQUW8e5a6dW@h8+Xt#Yd&B$56JtT{okeZZ%sSrlu+Uz+V zcaL#SdNlNPl-ykJITqI{KgE#c@lEx_%O<3%rT`04Fe8R8BRwV2J^d3QWjiKw&HioZ z{Ay!gS#!qRU}wa8N6i1DFz3p;<^a%BAwm7aO=s_^PlahF%`AZU0 z`u?3|XNG^wJ(6h@0qflQQyjsoPd-iRb|v}l@Kx}0h2zt$ZDC=d#l&Wu$Rje1baX&f zRn_m|4`XJ?hrF9UvsRasga-C|1QSC;Qz9rw51xawv$&HYPbaFKL|%`(fN4Vxjh!cUd_Hswz2s$ zK-?KKckoGY+I_*Gjm{P*e{cPl=Y0)l=)}63d^hlai;^p4M*ml?YEQ0zy|Dj!VJ=|uX@57%aNEDuC}X@j zP^8szLZgr|M`|B)Xy-&2hdyvjBMHhkB)mqoiT#=rhI|KvM}&0?+^6J`uWSMWyQ@r& zhCpqCmAo57N8ubVE2C~7e#gW&z_o$eWVp~Ts-j-~T!>~>j)OW3Dv6fraUQ)jf8JOf zESHi5t+voqXQ2fNsC_o_e(y@^zDPDeaWG#*{pg{d%rg(7zvge-ctVIiRCzB6SH^sQ zTHO4@+T^R4FL7CRm?vhk$n$E`hbFLDh|3?CP9Ai+(=d^Bt1lTk2g}XWUaoF3=WKQd zn-$cE51NC(GmgDFI!Z}sV8{HwR3tBN&)ll9x;SOC>#NF$sfkMDOBcyhO?)_Kvk4g6 zOfw

jT>T#b$x9zg3asH<=ehumkT$go2A)N2ssOG(9?Zg6yjJx;yJheGDCwi_jzl zfN>-@ytr(v2*?Hthv!k z=T3`R=}*YvO-TI;f~K=+ekJp?_|dad@I9vWTE{i;mA?@G&f7s zrk`2WyE5?WNYXg0xz!w{udU#sae05R7%A+V&gsySbLaBCUZB!Sl_%S3GX#i$NG%5m zXawzwED&lMi2_0DPPoC~z6tLP)Goc~7_gC$CVIF465ujF^Ep(cLYy zECWeTZ=Ji%MJ{L^rwH#lnMGg8xaOCxRFh^U;$o!w*NI{LqR~Xj(wUHMy2}i6zXJbr zF7r!ZE{Q}A1%PV4)j2qH@@cmCi{oxoAU0jZqB(uNiGM8LUro@61Y29Xit+_Bnbkv7 z>E0*aU5IIimAHNPurY62spZ(n7Q#YupOE6zFRZbS$1(4+l-@x!Wv~T+Azr3foL3aM zB!k!$k~xVcDH6p;j~cgV9*vTyZ)ovK-nz1gqL?PxFQjwYcw@^B_efMn*W2}Lf;#U-IK z!`ku|+O(&*N9JMgQH6&$i$C!&D#T|n$HYIGB>Pk37joI|BYt@A^Nfu2-|Jpz%AG(` z-a*`m5TgV*O*4{_&4i!FR#zQ4nw$2fHipl~s5xkdHf(~vFIFq)WSnxxuBsBa8tIY} z`74HBvco~nF9XiDxR#sTs3>{aa*_b5c=bbbX>B2x7mk7RS3-(nYDP>($|RsfE4$z6 zW?lQWyRNJ0c)Vw(C*&>>SS?``1eryb$qJV@S7I`1abZ(_Gx|l!FLOlf07%=- zq2IcN`cq6rsd%1-if6SRi$<5i`Vkmxv6+hHNEgPZpZkH9b_{88*WI;TOX9D8L@Ad| zKJ;+B?7LQMJCl{W+flmayK8m$&p2nC@V8QX&FagE_8`s!~N~dga z-~oEx~-rNZME)L$- zy8+6BzYNLx|EZPI13-4xM49nNi-_>yOfU;l@@K;C3ggO{Y*?Wc0e6cMgQjI_|Gf2r(XZzk=;1;|=#Hr39pzHo%MVnvFMl2 zVz!?btASl#_FjwKV7gv^$RKKTQOsD>wvg(t-R7am?kv*%%X_Hqz0w)R`DCNv@|k*% zJRJV1SprL!>_zdrnISO+G*f~eaZk*qaq}QwZCBS$8$xj%)yb2(9J+*_30)1t-<5}> zf0$ie{A&NgSR0dDiy;22?SPrN717P?ofLPS)&m{`IZX7u+YfxqDDSM_`YVk&e4p++ z*wCKdh=E|QW!0eP>y^PJIqvd#BKA`)Ra^A1^@nd3HZVpF+{Q)YH;|5eXNb^v%jt)` zj-8if6Wv#qDY~Z__K<4!gk23DXSSc~jqRJ)>_2Uvh^i*vZ~o7@Xyom8&)tpt8%iVm zD%U42=clXpZ8<%ihRl9xQDQ$aA$#K)1-?(WBoyh*wx<_Ul>W~g)YQ~u{FZNWyK(-{ zG(hAwfat-d#%^7p4LTu6#OILPvlfOyYIU}kv%wH{`J3iB1vMwUs`FLH8Edxr1t^#s z&6TKJ+}^(Si+tR8JDM*$UP4zqefRjb5(+rXoBW@lard`({ymWMSH%<;;E<0>$nJ7F zBiAN!Me8-W7JSC#W-{HOuy%i9eHxXU`Vf35CX(%o003;j*Ea;$xj_ z-#Bsrfdp>o>rGFt!ojs_47|%3IXKlupt5D6Dye8~ac%XUQPIO*7i^}E7F#UKpQ{3f zp(Imn;$FaNTu2p$Fq5W-y{#{Vb*7H+(lXCEVRCLY6G!^2E7)6AkK5B`HL`?(z0|?^ zy!s_a|BuD%HU;^zJKgndWkiQQvs@MvYKXTt5VWnFH_J}KTItq4YROL_lr(9jQo}nr zjP8o%oSaB9%b+S1##%}?E=BPv8}hL-b_hRO(x`%^;GnHk=*p-H zH}02#t2Q6kjacHrbxi0wfJmRbp<}Ow%N$I80KNi7#dG_^lNDDDW{E3D`1PZJ5I{8dQPR 
z_fx2STzEHmMW|7b+ewL?ZGPD}gP`*J-5I-3*~bX5$M&w14NUC$Jr)wLLU-Ve{m%Wu zd+bho?4Ouyl6~3zsf}Cu=tRQkfiI5*){`9;ghXM4zCjmlnjg}=!gDV#j<`?BAM-4r;qN(}3nlWEwI$!C`3mu324Am*WkjI4r@~2H zty~9Asq&1m->1Lm%i(59NPp+Y%jh5ojteF(K?;*~vJLwSD6ZzPs_@U}kGk|W0JkKo zABX~-nGiClIk<+^WuWN^>uu`{ld~l;^)#DEB|1B+yVpcz;HH17jG68Yu5lk#K&VJ( z;B&+HW}<1Gx}yLK7KpC6m>p+xsFcF|Xt()ig zH@d1i5fhwED9$%XC;J=OW|4_eX<=6&wCRNA^4@9uNq(PoU-oM{Hmhwuuv7WF@fALL zUdqO(`fQP-Szp1aa?L)VE^sw;pF}>wrbqWU^B>tftKt$HVB2ht9YnU=)sM(^PK|eK zhN+ez92IQQt3tb*RZ+O>gt=a5*140;HLeM=+Hg(_Zizz4X;!YZ z-EO8#Ii7r?TNJ$<`bJBp|0kYbY9J=%R57QruXG67Np}cNwE!Iehw$Lw!Yc=qkPv#+RPj*&~WDzXkF`SsGQ5b5R`2p{ShkpFQX} zRk>m!4+@{}&n`ak6}qjNG#3n3-%~wA3O&KlR!!(U|Cmo7s=L&_^6X6}+Y7^&4BE_* zNcArHh>_h=8@TAS&QymRpX|BOn80B)N2NiG{i!BSbXhC*44ZJR?<9gsP9ne62OJ})I+s$y&Cs5iPgpVisce|PuE>a=Y5)Ei$ zXO*S8;*qKJ!z2WiKOv_2T7JN5M2YEqIqC>9Zi$Y`xUYpZmDphyZ4DbV)Ek6MRzZNq zQj^;Ywa5|~+?zYFu_<8twE)@8R-Iz;Si|)y9i;t>>@R=wrK%B&?@D+~rwK=1vvS~P zQkiKpY8kIhPZ+DN`f>BK3u(jYM!2&3S$DCZTa=seVh7w2lZeZ8>acERYV;C(SiTaY z3%)g@RnN|lg)6RVXm#ql#c!sjiIKS)affcE3Hgl6g8-RQ9{-LQAF0<(%MxLLqc-m6 zi}E}0LQ+vyt5%%0Eq`$zyr%}#`C@CrtXT%}2ufaUiMK1W$X zlJqM9(!lNIPS?_geLyh)eLT}g7|;+!Ep4_F^@smi{?=b1(PZ@q(TjY?dbiB)X-n7^ z*njJ&J7chKInf`n_^(3(7%7h(MfMlkY#nuT-g+8pU+FgC*vwXw*JU^k+ki#O4*&3j zL+J}%L)~$zZ3WoY*|Jw377Q2l@kyBVb)weaz`QcUz5YVyOAb>RJE_p>U_E85wvR#b zbyLZO5X)ix!tL*MKugt_^8&Y^Le{SW=;XVPG!1=2bsOz0Jr3sP)LdcDSL)>@IiT`N zAtAuL(c-s!NI8r#w${#|oc-L7HKW$-gnp?e6-d=x+t|qKb$yVhq#8TUe=(|=IU7qZ z=x}|YLQ6ZeU##~q=JDFrqS*OzyZSb}o%dE3Dw;wm>Q{btLL(Dv;bGa*-P_YM^;?m2 zu3Q^xztKB$74F^cb%VD9jk0qy?tJd)PUA9nxY!aO%Mmq%Dj>nb=gR$}?>t~+_)y8{ zU*dcM1BYlaam1?IP}2G!3PnJadB`zD_ItbEr*t?UJ$Kl-^8LK4*PnMC^cv5ZXbJTG(cF;c_8&KkXkyF{W*U5^yJrX9 z0T%v0Mrl|NJ6SUKdw2dmR&Ol-7hVSNH-*nQpi2GKKNo1B{^2;coIcl&H(g;EUE96> z{)~r)umjHfI{0L}hI!BTy*UXKb(Cd$3IosX(%bEh+xPz}@|IfMU~bQK;>hzGH_{@y z+n+BN;{QLM-a0DE#tYY$?vm~f0f!P88VOMW0qJf5X@>3w=?-ZSP-&3vnjwbn?(S|l z&-?qnbN*%4V$GTb&+N7LeP5S@jkYMMm#b~(PsKr=gN--tUM3O>)GMSD%VdH58xH`K 
z1|bc+hzHmLP49z`)82LARb1Wl_>ZRSPS;TciXE89oZylP=5l-pcI%0wDdYM5z)`(S z0}yTqT%Z}dBvLM!7czRQ;AgQxIFLogUg@rwk$F3j$#T_*OI)oUtV5~)&?666`yw}YANoW zOG0U!H|yLcPN$vU++6<&^7W0zU(Aa*WYsID_tBhc=?p^JA6Wcu{oCexOciS4*VQS@MDW(lA`Mh;ggOmox(QP&&@;Z^BBgX!IIG0yt zA(b@L1mJt%ztY#<9(GEORb~0CcDV8m2^f9oo;$pIXm$J>3gq!2W`BU^MUcG1V+R9{ zytn%ZE0|rl&I`lG>6 zuj&CAVV@kn$W`eUvID0i8hHPu(ZERdK`i? zSj?~mMi8T}2(Cg-w0_6L->tC24Sxqzxb5K-$#x~^k?A9|%K~3E;4&K;wZ^|fnRa}y z6-PhG9aa@8yH!e=A~_Q0zK$BW_=TTb`3@RhnJPJjI_4T~7C8E^PyJ)zMY3`F)X`_O z11Ba(4vNWCvQ4K_lP}QmMo$%rAj;ndm&7w&CDcX-OzwaEMz(92ZXX3t-T2Udl{=U+ zs~ivRPd-@5r#(Aen^Jw1AwcDJyN7|M2f{QTNQJ{!=Ht{FaO`Se-M|<)lfN zz)GtdioDGzE^1iLn$@)Zvq*-M;B8GghO`XV)B)f_F>Xe@?|LxZhF;X~bVCaNWo1f? zjAXMuOpRXf&ZspmV@sDg+}~n)q^8oJ{l<_V@}hzm(F@oZ#N?51<7QP+Po&!Q0|iQtaVJdv^E&m?7?z;wSSGw{rr8a<%NulaMYkZEfxUmZS3E9>$N+h&VYzl72boK zYE&JCxv+}Ae4}R-Bd+Jt@2OuAg%-f@&Q^f~bE^vu!13!r*$awUmA8O69y_Cgh|F|x z*QxY`Q)3}0tde1lzM!lDa)9JpG!+jVbv*v$zYBq-7FNjba}TR=msZXW$inpoktd2F zX&Y(46jquZB&2VA+0TijXbUR`y>;+r;ZlJr_HOZ6HNdG_NNdc+^r4@-$q`MZwil6c z!W6Rx@;HPDf?HY4`15$zY@(!=A>%oYtIz^EK@H9jpO-X=l{bJKMWI*6u%Ec)D6#3Xb=QRH0+*i%*j}A6Yq!T~j&q4;T&z)WaR%rsv-d29c)A*!@$jg?{&GVL(k-UnT`@E z!OGT_NAH3Wt-W(Tg_YL8u+DlH>DMZ?vJ$PG!AOvm`Qz864U;(;vue5!dkq)b*&p)o zFQ4D51+s~MO8T7(j_}taN{lnpgD%oi;t5A$L}B<7lj&9iOhit7u;=1@amTfN=el7M zLXYg}ED6AruH4ITm<|3VFxsH(q+mY9$SIzc8V#Hv#~$);KAIor#m@=cq>PR%N>fh$ zb$_(zXs}!mr9{q}hUDYNABcWy_RI;cb#0bv_%wQVT2%LzBIBaUe1^|xfhOpRs6}U| z$Ve@QK;t};Ao$mQ%#Lre7~VvVP*{!uX*oa++T^V3XmW|^9f66H*8JPP2tE{#ifFda ziObI5X9ZnR%)`t?#b}YDuA25eNLt&kuL(P5g_T-CHWe^ss2s=^)wlk;lZGu zJB=_G$b;tF(!VMg+t;<$6jRaE6Y7px6{@ZFmQ!j|tt~V>8q2mNLe?*q28M{uLm7gm z{RzxzLJ*cBh4erkQz%=UsrxR_iRP%ZU2Le@j&zBaSdZv#2KX{MjF0ywL482nHXj&) zBp@gwKl$Y^Q28v5mRr5H+$@qpSwU=U9ajf4#9v>YZ`$iUAxVB3cL#4-Kf(@Jk z(*HXu!Ni2j)pl#L#&@N9M!vxlvs9wVHKebg39+%^EZ1wyZ>*r+GHgDzKmj&Bir)JJ zNQWQ6L3k31T@B_0WR0ijP%ep>JFoHhuXH(zzBh6xgS~?}ez#Z&TeG)YuvL*GRqSjp z{0G(Hjr*XAc*vQ!z=!R%A^x%5n=|oXpGvHtZJG`*-Pi9pujVrFT=b+?+ONoa5u&Jj 
z|2G$g5q+T0ZXsh+Ekd770pZ*!TdUuOba9bFdl3|WzN-CB-mzGO6PIeCew)0NVeen0 z7zwR2b}`9U?^Jmv#(WvcPIf1=-ucqC9dcsy@KZ=jU`MgkfSY=ujpNaH2S>XLc=*OlXvJBNWZ}ky{)0x@)ou!QI}(t%YZi zvf#n3%Rer4tc*XvMmmaeHM+k$kjK#ypCxuTX6D`%I$Bu=*tH6}HqVf}xr^N7uuqot zAeeY(715o({M?+m0v}MPVBsB2WShD;Pd@DVNushv9Q)2B3%oWWg z{Mj$_9y9pr>T>6maZoa%4!utGv84%psGD?JF}zoGzTQn%S(nx2@%l2sR!I=q*&)bY z*8-S!WZlRa)wa7vCp)RdMr)G^y>&mx2i37)^Aqyw3o z)6xk4y5spGU2E3c+Kp<_rq-_{0k--);7WoIp#q(&{y%qjbg5grh3VW6n@_WvP@X5; zjJh&wXREE~O|D0z*W*z>3OBvNkRxR{?lY00)&Xi5x6iWRI(vke!}Or;W{b7m@>oaj z8{mrdDU6iyX!fhwhu1_Ey+3ZR`s5dbTY?qAXyPB(yLDh??_Jj^%$Q51zKK%*=FM56 z1=3G%zzx|hD{_m+9v{UUsNOiU4)T6q;kD5cL9=hUVDXIg#-W$~&{grm?S4(=Px36L zQUSsL>v}3%8{_+XhdMA-`aWN}qSKOioK~?pWJ5@$yS(+RlvG;f1`s$*E!=i(Y<+R? z$Ri#Qv7BRDcsxkf`h82KOU+=CF<|4}#7%ppV1 z4&pkMCov8-4Mv&U5axi+urm>#+nJ78Z`lN}O7BM{A?z5p=b<;9R^|E$!4 zXzLo6O%!MVS4K>fD3-&27>SR_TFf0uO)SW@C{e_#uYV2*>sP||Bva4OEeiJ7izeKY zMIrL|Zbss-V>o&6Q%t}6Q3dV5Jks&uO-wztnH>V$-_XCKOpc+F6!(;-c=VC6?*%z% zQ`a$2`(&5XqPu~(<5Y=dNqsFTjuSqkS1@)!Cr|0$<5k+@y9yjD+e z4hEUb9Cr5Q!*<*JEHol}{@nNA?H&-&14|v?FHGrgX7%B%;HIzEvp`worMdV_+SK7< z&@#jRs}_PvY51 zvkS@$0Ma|MCUv;#dZQ$BQ^`whI0wdf~KYoW9w)vcHFF?j6Ligp{= zLtdrEf6XNjc;4^s=`jk&k7O%!rW00+(#vaGupLfdyJcme$4*r*{M>t|no0UlFp<{iw^1mOt}8%qp9JTder2Nl9oLiqM#uDR8K@W!w{v6HHbr50 z?QwrmW-I+-3Rp^Ssu7l2z65VO6Xe8M$8j*c({Kj{ec`0^m$b%MoE{b?O)XB7auY<; zrb>G~QTmZiZEwfa+-uY}wOi}VZUH6@NqF1@#qjhz%vM`Gp{dlqBNio6CL~vjzG}>k zg!?(=3LPdBtm^#Q?@oxGCv|301+G_utQLzAWuD;zG7yT5kHnSu4?%TrgQv1V2%2DF-*n zn0e`4q$z>z)*B7trf`f%Q+!64t&xorh3;xSn3LozXHBVVjkWLyJu(u(RLX==I4rUZ z*2}>D1!{6@1^C>1_j~$jWjt7`fd!yBiW_F+lp_2=3;7xop%Bps3!9{QJOvD=;KI8={GQqn@$N;$rrhefTGF;5`@i zcUI`T$id>>bN^#wx#X;l1tr9;SFd+kXl}>qUeE=r@YTWR z)TvyzzPzi$5cBClo}I(*A5V0b#8>dhh{Ow(wzf8|=Bc0Rn%L@95uevYk)r!a>&@jI zaD0M7M#OFeA##MC*)Fn>z9yMvcK58iSbw&`W{J@R&u8hYp5#5ab|0F@`$p2Bu2 z9?Nb|IRSPk3IfsCHdHFsfB zY3(BA@l4c~qf25jdQL>uVSj&zf_EPDsP>-pEZ`p&8Q7vda4{fYjArnZ-{Ji@I<-ozrZeYC{itDLCgd5_Q3Xf}$l&o^jB)|39qoouBse zz2@owaUU9m(W<1;D4o&1+Un+~k}fWyqPKI^h3J=G%gf{NCNx3}wP*1d|8iUK!e|BH 
zn`*A!{bz=MAGct%4(xoW3n_}QP8>)aQ7u}IU1pdpAFI*D{TG~r%ASK`x^KsxxJ&Idv5$JGU}hE z>MB;}5B?-Ui1uJ|9*(czQO3ILXNtZ|MIK??Ds1GSrHhSY9Yi3uU}C0a9ffHfFediX zM9skm`=%m@zHN#dUO@tsy;Y1BX9kg4g&PZ4^GaDSC^cnib0~lUmA+c9q_Gm-<;~x} zl@0A~#6uQ~1&JJ<1698ICGt=EvBx6@s5xlKS&S#>zsWf6aTS-)F?5bjy;ASJ1b z7d!yY%O0;#;vaH@FT$2Y0G3`*NdTNdI5cRXIj_0&=Crg_5}e!0@o1hyo7KZ>7+Qdv z+!#Da6)0AcJ26N+x>i`dd;b^hzU8x#ULHDGDo4Q7)j+->2WhYefu&}cy|t$MC(7Zs z%J(>XYAVo+Q64n3$;5+cvhJXLBlnN`*r^B9&rpS1#RnpUlfw>5EJ2f*;VlYO0U;@8 z8-mwBu=n~@N`^+uKfLtKmc@x2)PvCA!yhJxwZU~RsJOOiflt;ftEZtq z!so6)Um3O;#q5Zy69nj3(_dNhug4C<;*U;@R+>J68;%ua>>ltjyh!QqMlpX;#c!Z# zuJ*-*0GmJ-0EtCmkRgh&RshX^IORd7I;Uv5rj4n~LmKU4C7Kex%llbOOW}$o`>-s% z%4bA0e?=9Qiu928$ItNH?lfsFkO|MXD>hQI-$05@o$tQ7OJQ57d^0d<%jEe*z0KFO zLEt;)&ODCtd@JT>4Q`OB3pStHzwou+`e`Gxt*;08+N%huVdcM94}*EP2L9^K`meuc zQwgLmA`fh$%BJTeA)u%kNihFap1g6Jp1`aIJifG{9VSD%dU>Lsg??lX7G!4dWr{AY z>W$%mk{4G$_X`_WMT$V)isWTZmg%&<_HNNN9hNgCa53WHHJt`{VM@7uqXX9OUu9Br z-N+e&nK#&CKjg0Mhx;IhWxkqkTiKRNvuMr_{^O@=BIj9M_Xd8%`KUV{krBHuk; zK!eYn=_ky~hQDV^&^HD;#n7+UR&K{r$ojYI)pcxyIX#6#pcI?+k0@L82ikb1?d_SJY3{>p!F~UgtMVjy(A9mQe4cg5;jB$ZXw_B=h z*BFws1m8dYygE)`FtWISvqH#TD2%*swwZ?vy&vwG^@RK|HAk|#o^)qUPb1M9oUa&o zClcfRcqvhemDXi6Q0)wwqr7hH)~1iF8$7o}L7x`nDa3_IQ-?>*70@%^_-t+DU%EWQ zKe>?R3=dAmO7_Y=t`B-JG2j2 zEz!Rd{>@Oi8z=U4nX`N?DH%4Lq(eXpBUc0Z)Kg%&G2=w)CD{zx)aNG#xuP%7B zXMao|4{a0+5hcC`DxvX ztTvzcmuLo9&nTqrzWjcy@#n$HyEsx0?QYqcyO%dDHpYp+ zcmS8NpzX(16g==M?yuGWB5d=Wx9O{3Pt!t_xIM9wnpcmK7 z*1W!V*OXf+uLS}(IAX$@A8v4ucjv0|C*wW!HyeJZYMNgj!kg5`+N~8Y6I|-3*9}R# z(W9OKY7HLspdD}uLR`@Ro%#Z>h_?Nx4qTd_TxrVSQh9S6R_c82S(NwGIR9{%$m|X{ z9|~hZQ=>~Yyxw!MY?VH;e>i-c&qqgR#LRJh9lSb85;p(kk^dV2#9}mMQ8hpSHBlP*ph#3t_aVo3PYXhJZQ~TOB-o0TObZe`hWsbB%#WKV@2kz|GTkBCxA8!%JL|vIYTLp0Q7-0;SvzE7b zzBpMAxUj)apRwP%Y}pq_0jW}+1*SQ9yQ5y{I-!@+7aeWx%k5kg_t)?BPqHmX3q|(b zuqBV`#D8CZMpOI#%&wQw5`K=8EjQ;Gt#Uv2fsN5A#dgM1QMt`iKdDR$r%<0gUk!!S z1!clk@2`S~nuBXlO$l12-s0N&J}Rq)#gkN30N}!6*k;lAkBt}H^My@CL&iuU6)4;u z=vK~Z^p7*E{4I@At;7DzN5pT;v-K6hd`W-xSLG~{ox12M$f@KIcP8T4!O7)ABj%QJ 
zRRWgqKl_-QVmw%UE7s=E3M6U834WWd&xDma9bn@EM>0Y(U6a}U%v;62PfWWra8#j^)F)qK`aQPxf0Z%_=0AmX3h{{bdp=-V5bIDV z+jNQA$L+X9W0-o|gaoPyJ7DOgvo7GqXA)JZ-5b!`&iJooVo2D&`JD{;lzK;2*{Xt2 zDWw-Pcsi&8FKBuF1NN(8gO_Bfc3O0Aod4==??9&6Bauoqob|`B?scb!eCsg6ia?!X`kl8A?~1F1a}~Wd-U$drVR$6Qg5>Ui%&gTpaXId9Sxij^M-k92r*b$w26PSgLw;zBtthxv%lAp($UN)2HX zWhx*^t5PZ}$T^V=lWwja{!!2aXBE=_N$*a(i&ENoJ^j;RFgQQB5mQKsWy9NmeV1FK z;csplPfk*vAuNw0Gkz-mO2;pPBoV9si6` zhG9rf(ZqdI*%zD;quDor5BM`lmG~C3cBDa?z_4!f51U! zkz~qzHxlO&)3AA#e=QLA?uqC~^_0!zfmKSW=1FO-aH`@@?;iwz_5_WE+q3ZoDCtFzf6K(0i<(_otF(6fk^vAJ!|{>qlN z{yoo*)e9BDnUE9E*1l`kBKw+-y8tj>ti=tRMDeJFgKZb;tcF+pCKd0tQVqX9uM~GS z=0t#0EbbVil<)3#a$qGDZ4rQ@;QQ?gpqVl~-|W9#4Z?e;$&)qE>~^A2&kBL^R+J&_ zKApV9mKb`tl^?-;;oc#|CF9LB{sZiZN4_~ZRr`Kgoe2(l-k^Q??ceC$4);(1A8l+D zJn{OS0e~7z{eHOp5nIeTpdjcy;%e(H@2R2qB5Vfz@>EP6z}4$=@e)hr)kc#ik_I@v{kGH9QnifuHT_dcA ziID0CkO6pjNp6MRxk%Lmo`CL6kr=yx*M8UBQS<*>3?2oO{aCnNll<<0@bFHdES<@P z&Edi$k3jkTtA&{n7}UI`(yAAxRew#`!t;iTv1{Js4*o~0*^yX0O^~nv7Ta-}=fjg* z&xIQ1WuoQog?%zdQsC**ae691CNfG$i0Wmsj?WcEK8t}QWf*8E68ft=Y|Ka4WPJ2h z3lA^y1nXrNWr5Tv*DvBgkr`<`zia4B~v`P zx^FL2uTH<%?Jg9uqdrWf_)HOz+?h-|#oF-rAbMlp|m zZ*aG7elD(4CVjE4$R8H5R9B$q^#oh$m<;t&hv)#JGzf5s=RDka9rl%oCAP9!lPQ)qbxVk;z zH!teFX7>Sr-Z}gst|j4xeqELkqnMFm^w=_l?rkAQ&%In@!TgmWmEmm!x9ywtSP&qb zXbhj_r<8N^jbdyT*KnX}w5M6e8&hw=)*dT~qprc!RvC<^Y^J#Tq)S9X`pX8E{x6H| z<5F4qI{Y`zN8x`|Jk8r{v(vHoRa;0@50}zZsaK}D5|TaBGkLfh+1ZM<;j?S>K*>+=0+|ryt(+mqbyB#VfYOTMG5vJE%u+FlJO5c8GxUU=7 zfc7}{{kb>p?m^@;BPyf!re!d;AC7NOX#RnJ-tM>?8L&hN7BEH{6Y$2LzoChm7P~L- zkW>;qAq|E4ij7t3g7+VS^NdrK-yKs$?FirJ`HI+2+AfgS>bgiyks5|PTmY~cW4@${ z8_@x=1v27&fzX;#5x^=wJav#KwWY&$qJm?dxulZNp{WM#M#q`rD`!7Kb&7PS8x-FHWP9VTKN1l)KIep{!GCf-E}GRS48bMA%Fm#G${ZMcfR zj)e<$fWL=G4M>5lfC{-h;Rnxx(u~EX;36xPgLo(JTEsP`?SsNZfrLiCnZM{_7@siQ z6B#wbQ{I>jC5eOU^QqbxT(~k^x~xL;r>XiebIdbN(;Y6sS)&XdXyb2B2O8&$i&bUc z@Fc-iJI{oNfWi*@!5bz4qegrZd0K31MFLFgU8`-5x;&mvQ0lmx=dVmnCh86+TXcJK z@qW3jd1~zT>4ZD+(ww4dKR*NaD>9YCvPqQy54U@(<6>u)DQc(BWX4 
z7>7A4H6t`owGr$lU>K9rxuh%T@HEe?*XV?sitz(b5cJo}EVcpFDC4s!c?-w)(@Hr5 zGSB_az{(~C%u>ZZ0Cn1AHa;*fFl z1NLSqHXLl*X|lM+Lvjj63bQ5n;Bs_P!smg^;r>UAsHrl}4NG*JKNdS80*&z)(?g;} z3Gx{SJ2j3sItb#;J{Ykyi=)cpb?83;9cTd=cb$~7ZbyndqMi$)uQu|$P1?8BQ?ZRo zCTF$NkF#cUWYoQ=L@Oy<_d`bhCh1jqao1W- z#PGUr&5h}F3K>;|qV((TFTf%NB!-0nH_miH{E?3wxL%qF6w0g={B0T|O%o!zRIJIq zvxAl!e()wCW_JN6#(>_tVUX?sF*-+u<(ejZhyCK&H~9u zR{{;{7|v44!aaSjqQffo7I-Jy2R@*dSVI*jS}c0*lN^+nOPU&kG2yGMWs4HKJa}DT zx5>j0Sb*p)VtA2Vyuwqyc=lm2nHsa@CXbiy)r4=v}f*t^}Oh?y?I-cro@_sWgHYJpKzUf+6L4)&`=`n~=4|A(8Lf zL!QV`iiDp&`%i$-)IFFLS}T9^%FVLFY-&pFRAOUt#V}7sj#U8 zc8uCj3yjK4A=MEAAf&gj}SEHey{Fr@dk;y`O z80O@|sY2d+=VszFKtqYWq0sAs_RWgVP1bfMZ~^+;6iE5bQh65>7Z>-#%kvYD>mgtq zU2KH#Z*pDW6Ke1Ya!SkaiC!KpoFDsMC1=MLJRT4SYHtT`UqJ1@H}}(S!ig6Pi-*N@ctJS_LI9C#9P@=_&}6!UEUL5 zi&f=5eV}!IMyVxW_+0l>h=8_D0<}Br-blusmGkEl%{MM>|Fa1e$2#8rSG}1nzEPs>oE@7vd0QH+?^0}WM}-^)aTvTb~_Qr*~#zV&PH-EJ1nq8b!@?- zhrcya+pp4*dR^EXf-dFa-dg;8 z?jz5cs~W2~pm&o{YvX7h-hTml85Dr}2?xoi#H6x4Q#uzd6jig6GU%ttmF`~l_hdaB)o$vaHSu6h{uF{sr1aC-gb6JUqti*NX{bU=;M=%_} zelVWA_H|>Gbv$Lwhkq2suLX1}-O+}B9RbT0P-fIh#J_O$@P(RJu=?SiH9*g_+GN!v z+GK?qW9Lgskkb~qv9K&X8G~mgq%W;fOp_H`N;YhP!f-Rx`K)Mfdh)X${BO+`I02!b zd2KV^t@4KS!rZu^*Dicjmea^B{B>xd*WZK05fphEyyT-OsKVbfqUJrs8Sl>9zY-3; zl6e?)_=j~AiEA7*S~%k1jzjhX^b}TLezR=3PZ_b(d?guFzI0ZqiN5cNnjin$YKKmA zr5`VP-Yc?YG}qQbbm1@3_gc%yfgzpFr^eJZopy5eNvo*y;NB5|Q0t=)4kdH1%K7&@ z8^7BoIhI5vd^u*@I4m;$vm`32{2svG@#FzOkE6_9bBCc9ThVT6dDtX=YwOu2)LTVRd6P{Uxg_Jjq` zHyLzSyStB&OZIo_g4_rvsFu&h!oj9E@4n6M>4he0E$g>8RA7vVX2*)aDIn`(XnfHsZkOr2Uc>O~ej%kL|!e5!@?@?H8y=*`pc{+s( z?Vr};`(=;wUJ)PenVA#O82J`fMKz%}n2APTqJDy5q;RGudss25Qkvkw-k02_6_%Sh zdfT8!i^H#|+(NY{YB47;s`m@mOo$d`r({cfpmkGh`>9&Sviye0;5c#XyFgxpkQ|U$ z-oc3S;7U_XH?E#~0{Z0Ti3V*x+=H ze7x8al_3b)xSzsma1f_ub((k=b{0}o?09nT;{9~Xb1czdK?U7WT7z!YH2B@H*e$pG zU2rBhY@~b2Dt<`A-B55L-N}jb{Zh0B0DjLWZ50~WO+V|`sT8060bb&3;jbUb{EQU9 zv($*aWOI%Hvl?DP+(sEz?0F;Xz%R&^_q%_@8p$-IW^SA%EKcZsR);eJ$EsNJe@bG8 
zp$&PtDnv_RAB=3KMIqm99RHrA`$(rPt^BFF+eeTBlA=F1+uejArnOOwZTXX^*;YwOxjDFzhp0F%6rhEH*o`-=XRF&D-go}^DRljtZ|b_0{EWMSD$d}w>6j*RuK=e; z>iS#nheDJrB07TEC7w7HA>Y@&f`ssvxL|(oLyq-OGEkkI>;x`r z56Hu+_9mi?Tal^_&?9RNsyv7YcS_~uzqMc5B24EWF}T~(aj)P_$3s#)(r*U58@v5= zr*{@{M5jF^vXEAkLY|8JbfH&+0^gR4xC~BAc1C!>q-ECnjMl^3OIzYnMm6tp(+;*H z=}?JuuNpVq84UP|=oR~rlkLdZE3cK0(xZCu{N zx88Q*>~?@9v#kep7NU7Pc17vLmVcUGF9;X$(7(TZ|L}a!{^G~h@GP{vM1=QJ zU;me}aRe@=)gXujc$YSo3r>jkop6;nynX#{&$t>cc6F(Snu7H)EF8{;86R&GxhHIf zJPf@~iH8>-rPg>MTrX1f|KV)DwwyxxmYO^Aq~q}3cHCdZ87jo#@x~d_X%rf&kfGi# z3L)L1KhKi(a;@~Q4EnkG`IR^f-y9=gV00WWDaE*4?n+p2`ld&CxcPaCUtfJvaQ&}~Hp$Qr=PpPY7VW)9AF zaLPznh^hS|dJh!QVniqT`6Rjhc}s4wF)nQ44=kRjmQr_mxI5u)g6E+7hx<;MvAd6g z(j|s%bU9U9_kLfgLRE{VDFi}GL)_jP_Vh>MZF{BZ>MO)rNuP=HUo~e1NEwG{v zuP~R%jw5Hn?u?Wz`7ZZ0(1L<~8jY_Gr}2){c6-%Jt>i*?DQA(_+Hmkk%h{XS)&nX* zwcPJ+g|KmcM|Lj#HT`Yb;P-aORKZA;_cwj$I6-MgJ%y)Rc@zycv7*-EG;y14Luojj zilrCc<#{R4B?qM6%}u{C?_J6Q0x$taP8{4yxqg0EuDDv0`EtKYuTLHSSf6l7%$lxJ zni!dYs{X>?(3%vv79#C{7Mksd8!w@=)QT_0@G|i!pwb5{T&-EDyxh=qAtn< z&RT!2Ux{-W$$X^7#wrb+k+L?{bEj@uZWmvseqh%aI*ydqwV>OZ{FAy-s#&*mgZ+$IPW$nOgNByOzRMub-HS^P4 zObv4H8UR6c}g)8S?wmfgDGCJ%}jdoNO+W^t*O zDB}7ZMyA7@5HoU#wNY=Xg*)+Udw0Tz=`YYR#jo}|tl=xH7b2Bv+y9UsB(j3|v5ME9 z1ksGu{J1W!{SbT}DU20zG*8EV;d{UHAq$NqM_p<283Qo^HU*bndGV!FlX{~Tidjl( zFWf-Q_O;V}397QDo6bmIW6S9>Gzw3&0=rbn$%D->Xx8IdV5#8MrEGz#f#qyn4dMo> z?QhWUscJ^RfEhF}QXg#nM5^5Og;7NsJY)BJboOZu>Yxd8)K4nWq9$JakJ-ndw`oz? 
zud9dbPhNLVzRd+DV06w0KBtrd{SR@8tCI6oM+yMhfvM2*J9Li8}%+-G}tS zGzkm$BMeV|!^?oX@FfXNVmBkuI^M8qc&4`*#yTDsY}-}>1xo2?Cef{YVKMZXQF~3Ty()G3}WQ08REP8Oq;Q(sB*yaITC=4VG3)xF}8b$~m1EK1rTl>=BBrds$zQdBXwCY1h>Unr9NgU)ry15grQP*)%z z*>RxxniD4^D5f(rhj&4f(a2<}^M|TE#7d(6f}dGsmDIO36GGMQBx3AxV74#Vaa61p5~scwnZ{!lv)705SP?wiOW#Z z*}ONAM@6NCohEiPxEr=}x?nAIq;b!_zIaVY0)tW#Q1B7e2J}0N4)Xc%V&yC~fAi1X zSMlXx#{uC1ZdZGP;)pZgLrR5)d-!UyDuJ1@thWfPLLeZ{ft z7Oy}(F-%pzqtl?2Cvf1vX<%{8x4?ct@rE1J@@?9slAd#g2ULrb2N`q!X4U_*Sj<-i zh$>~LlBY`WVeWJ{K4X(bmU>AUm9C3hmt!syTfPvHWjwox#?U|gN4R;kXTx3_T>RS| zilbUwo|>Avjq#1mV%yR@+bDFwhvL<9t%>_85bezcC^!4Z&O~y?Ya?<}3w##CtEX$- z`_Dcb(SoDJO0-WVh}7#ZQBtSgTLZ*TEhe$=<%CmjZbuyD9FzP^*6g2FUe3HS+n?~! zCb(7|pt6gPkJcew0F}nteWyx3)=nFf`>`p7q^i@(8z!KBsrFt@>|Kr!@kq!KNsCpep6^?&WKh@!1ns`-F$0 zK4}o1xMI(b!5fzsB`jDz;oS&B;P?NsG*y`GOhmMX$QiCj?1*u}#-BdDYNxwdkCZIa ziNjOtz~Xup5Dxnq<>aOw|58=#Tku{1+S7$ zOLJdo)egWLrO|*Idwv!8hUD!lLqnr+CZnCI;*$Xbf~Jlu6Ka-rS$dUxqzXJ?|4ezY)Bi1guEOc6OrCCxxCWnk5T!2ny5#VWOgChtlga!rggWwR!%Eucb-?_Mx z0iZ;g5}FKisVy=>Z4Z5Mvt*zSErN_vS+_Dg+uoGar14}FX$2Uy+U zQ0Xu?le6(y9+!cSF1Dd}^^)$AlnU8kl9Lt*qz#ssPU`|Rligfp?ES4sl_w+~V~1Cv zm+mM9p0%CUtvbe*ma5oTUIdA9w?@+)4uSo@>qh#IO%FuzWBgo7H5s z{#`5ylm1fL)LLzxTBp+)lnvG>=A~z>F zt>Osu0$uHUrd6DMEn;71j(X->m#;St%3e|?NKtQI>ae91KfX1U*BAbQk?LdUqH3hz zKsDR;8gQL!xK(cprZ}QU?fK)nCqv4Dm4#Cl8asA~SufqIs03y>3ju}SS3)vDwjiD? 
z4r6CaMga0G6pA7qN&l>V=DC7;>!3%;`NB;kqS+qv+Xq6FG#3~1%4VkR%H3zJ7%-+t zf31*o>MWeZ6$ZX&Tr&l6)oEPDZ}B7*cfBE^qmRDEqX1=;tc~$=-RKHG@Lb%*90&4PVq$ z06UtSMi00)JNe))dczpZ10p5aZ*&pb-@JW@=rnO585Gv<#f+5Q=dN4q4Td5^3kOqW z$+tnSkPkw_0N_n_I9bp95tHpikEFbO>+BK2&*PoTRacrF(__j(1TTV>3yfe%%TGMX z6QB~#4_rvgfdU)_0vx+oLIzx0odZp#P=E0~I*)%aW48U;pkQg!oNSw}JY&7ps8h&r zs32BH^awy-=g4h`nAYCPMqI~ z{|`rJ6&6+3Kw;@FsX-bPq=)VhB$V#Xk?scR?h=s(kseyQd+6?v4hd<7hW~v3&D_q# z%yXW7)?VvXE~^)efE+ES4e0tGf<^??wpCBz}&hC z#WE>IfR4{e&5@hYkewxxX;M(70V9~NTF3k!|31up1Ve$Q>;W4*o@iUSucX6zEm*vR z-r|g*rENWKoSd)I;4S*=qljlV=|t+uCJyE!Mw9xE9??W=7Mb!5|GRC1VR=8K81EOs zfCld7==9$7z{RXa1L5=-jF~Zx92I0(41R!9M|kzZ*{mD!h{vx-g-=O7-<`hnCwFf}p&d9)B;`E3EykL8bPjR+^cQ;9d7AjPMznRLq*Rc zmYiFNyPXy5g(EdKIfLSzK4@8TlzbqNnoIu=^i4=V!H^dG;~yX!Cz{+Nyxn^PT;at> zs=z~oYK#A7je)_l5@z9kMMpp4^@U=#iOycL+sfI5XU~famRm}hBN|H{e8j4Jz4az> zByqObe4OzMHr#QaY!4J#iy2yBZfg8{Oo)*?zhe6bmd+mJXTxZ!lg{B`n|-P?U#|n> zr*`C>&y+$%7*Vd=akT=Be}Wfp2cXW8LYvKkpGITo$L(3r&s2xT=|PPYU{k`kgGW=+)sSB~pxLV`1mYp^pC}O7EUgs=p&M>odFzAo`_xx?>}j z0A=U+h#U6J%T`ieqH9QcGW_O_&7j!>r3|mgq%XMlkOw!TM&IB2bpBl<+UQzB6boIo zoiQDq+V7<70MycEZ@Do}2eY27+0~ifB7jItWXzBvH7R7VPNAN$ZFpqkJe2!j;}e{SoNfHq_UhwiJfq80A|eC^VV;P>U!3Wip}Dfxmwd2#BHdM6O0+OP2lH*d@S?=DFmreTAJ99(RYwlvv*TG2cb&Lc z(|>jGX^4{?Siinz)=QkM%r{X(C|Gg~taw*hR*-YjxZmTy6rMO;$AXyiTlr`i7}1w+ znNcDI6jl`W)BsuaT*sGANwveI`Y(n65z3C5WBz5LYRu1?-tWkFtVzbAvWSCRBD29l zOxhhMFvYVQu@cj!xdMN_Edq7p!mu9pBAxC{a&2!Yth?{*)A{+~2VWM;zH-yZyOqCQ z^S(_EA2uZu?M_quP(kEr23`}_U!B${w#EE!Yk|qvaDXBc6_t>+%urcA4+U?K^Gm`_Dc0?|CqpM6?a2@e-r4+enxx5N#uZt>VGI05KJ1h_Vf}fF z94Tf2_Up9VPpH$CpL+N-u~U!tzZLtgI4i&?-04*!u%iQwk8p#N;RIrC6+ow+XIpHB8($aa4Hbogzkh%X2o7Zs``NvEvE;scWe`y;-auYab@| zPl(qx!&cYVE~n9#gt=VZpsmpdu9Hp7a_l+vi>^t&X_$lMak=NAheK5%?d(Pz?Zbyx zof5aURfO4p`f1x68o@m=t!0n8IlJWIgJUsBR%Bb}$tw#VzLfbMKt(!^vpY{ptb zRVVmcrRQEZmiRjYgxff0v9?hg1_C$2{~o9# z`8^$)shqh^!%?u&?Lu=5ZSu7D-)KE=K|!na8GCFer-srW&S^^5*Nge?z2+pm_Mzz3 zr>HOUujo}d)bh_3K=%z#6ViMnv+|tLGe=26|9>Xk)l 
zmmaB^UFyK07g(Dp)+#W=-ozi=(49KlYrL`fe`j9{tV9M2B*7Ut>&5W`C&f#{iP2~Bc z-h&9{DYzLudiTPloSk~f1%CUX&R6=9hZr42H2AN1^%r9q!B2u=lp?B~?fe;DD0$6} z?~O1{!r*yNlR`8UKqRsyty)`|XLU#!yf6B6?mOUMMQ5`(FOR^f`7)=38Fy57Y+~X( z0x!Ay?4#j*pxGq)!j(l1K!?66Zd}&~3I^`F;WuWnvN*C%$nr&86x0yBgzN zW}c*D#>X1z)r&a9tIR*M)O7_~zYndw-I%$Jv^42w==M*r3qv z%cY+8<;is;GVHskm4un-ID}2N$xe5&_4tg5`hJO89ie+iVeq9nX18P5FIwD8Fjk!D zN3Yis!$%4OHgVS-F#?j2>)}X)nB+`xSo>L}bXSwAA>i$L=7=cJiZ~C^Z*sq;Z~*q+ zwxn1@pa72-T!7HeezMvc#%+12>cmCP&z{xaJ8`nvZF%E-33oS`qkv%x`$6R}WB*fg zm0mi^3ibfpR53Yy_Q-i=KwjL@0I?&IlC!yDvjQ~mX6(=ra<G`Zgq zo%|7rm^fXo+{(lZz{s36RM1fXKZXtIKccuI$NXisc;eUvZH_OfrAplDhlPgrcmi3N zkFo?rC=TngS5HF0l{mm({=;c%?vMap<@V-N*{_l5Q}(ZaMd>yr`{Rz}-i9*^$ zMC97%ng>Te;FkAoal)RJ^n#4kM|j0sA&Q)7O(K$pXjLN@+1E)r)!exH^fLr0IWe=z z#YCXuot;nQswYe%x&}M!4B&Xh2^j?&)L-A!=QPW!1>O{-^}Z_?LTrUh5UXBkHgZtJ3xoOX>fh41{H-tT58%EnQIXHW2ul~v7qO|rI&pLgc zZ--yLbyWLoF0>zD!SK)mDACt!M{3s-&wNhn|0e${j6;OTBN*7ZD6iC3*3|~ppa{i0 zp6A%li`E+9?*C>uOty8(-sVxc!S%BdB^S%&leWz}qTNn@zP+2Qp20M+KeJI-kXq2L z^sd})NHBx@sBzznQefC9Qn&Hlj6ELb-&>G-S3Vt1pj5V|$8@N3K1oK$DhI4BLZ&P& z+HAB8uO0PO#Ysm3optZ#!XCn+M=tj#YGq;@CFCE3CwEFuksMXmsxce#lNFk}OX zeyt0)dS=^wIPHwc%p|vql!5%U+QV>|`i0WscJOQ1#Ms#QUj)ruEB^(PD3;A3X~EEV-1rk?%j=a;hx@xR>u9mAh>??kbP zJDj~O&v$~K&V1}c9*sKjusn;l`gilZtmfHSm$Sor_ba!n3Qag;a5C3d@R0(nMfbZ{MfuPrO`F}De|&xSwegBdh(9*2~U6~&;#d(&aLcnx<^FX)oC6aCJMyGk0-?v zWe$J7{X?&@SwPL;=dZ4miK7;0+}Y~P>?C@#vKj3IXFWJd)d|DwI@`R_dP6v=dN+D~ z`!2A$j=)Y0=TSZn3JHk!Z{jn!$<=Gk<@OR?8(ZLHSI&1&<@e%UyQ;{60 zJYYzSrp^TNobBON&>yr40~y|4$=W(yld9^ZnAc8WG9Yb1B5L}M%RXubX+Jv;o1 zqIIa??5PzsN)yAK5WmD{h`sK0{Vj8msNT>LIG7Xht!J)yleAa@>lG2O0aq(4A!eILE_0ImEOf+k z{1A(>kD(&;AzDdQ8pxT*>3!zqp#9Q|{n7VXjIQGreRD-za`r@7GYHgfOG3B1)*W|h zvOla{@N~IYe_;;H_}F{+#y;-M>jH3ZG#|iRmLcu98NyH13sH|SX$ru*ZLRMCb;7-z z4_~o$bDYE2WrL5cMRqqB>hIN^Y>Ndp4hF5kq)Qz5+h+H$$E z`B&79#~>m>wv#r0vey&T;)U|7wm40eMhF*E!bdy&7hG~g%)x4YhIq`U`IofgvINf& zFMga4PS=y!&Gm7@*FRaRwGfV3y9FFxs=tWEKe9Jxe{tJ19)_dhxk)`0*{il&7@~9p 
zuxcX&yf-EYC{fXsQc5k`D}WC$YoQ+khLQLQP*Fx#T41`XeL$#V$)`h<4;l;bHDG&pK!4AdL@~F5Cjs zlsD+SQuzyBKyX^St7iXabn9MVaFx%;pg>HG8GitDj6w_cQYi$K+8cs}z8YmZ&8`8HI~#JHr+wR?_{iY&Xf{e&o>#QnAqgR4d}ufth;o#4M06QvGMZ~c zC1I4y#k8Ko2JZ~@k=RSi`0eRkR{{S+beG@B zNy#J$+_G52Amdhll;Vk;o^wcq)0Ldqh3oW~crbFi_eDhOVpMOsO~eI8kh+F$)(=on zTlrfGw!xkxU&4m_#XNvZb~lZdfU?9S@V4WkFlUcwJX%vA%P~xiy+AU}#smwI5Jb4~ z8bny|Ivz@we#SrdK9+Z#GdF6tk{Q^+iiJ zl$L?*lp#qcsUq$cpn@N~cBaJIB&1zl@EqS};S6u-a5scGpGRVR@S6%e5ndSI9KXiG zjBzv)WpNdX;I>qvE3U?mN^cebycZ4{ltf+U{BRq>8{E4a2QwA!kY( zrbXP+LTIapA^9^2Q?z{m333M`SSze1t&rIw@&I4b3@!z>3)(WPh0(jC$9PjwQ^#z} zUS5wNB6q}3YOhDM$wT6ASd%>q@pWiRo>l|JT-k48a&+Cj6YMc)iP7J9$wijDnhs8m z$5-#plbrsx@uND8_Sc=k+?bs$*@t}9N_8AZ8avf>A%+pJ%Dq<>bLIYXxyPpGvR-yD zdKvKsy!pI%F$XPaKpn^8qa8L{di*~;b6w< zI$RIhl4~Jl++!dH5eH0S=q8WEsB=hhD zV3p9mLb{vt0qTLiC_+x(TPH7X@9w1~vTPxD$y$psl`^f0*6;`hd{;}a_H`A|w~WuD zsmA_f|KSP{t%I%iFxGyXW5G}UltB~~75jmaAa5u~9^VI7T_zTf%?Y8s>s>0-@eM%c zdPIt4=QP4Wt&(K)^>kUQ*~MJstP2S+!%lHXfj#*;y__0*IviG;g6isc>=vsLPTMYV z8I*IdnZKRR;n8yTmUAE9(&5z?x{7f5 zB!vI+cU2@(!!=f^_Gw(l^E$n#sN&p@dHY{OY7~AXS?jz#;#iL>B}JOm9L_(#C>X1l zU2iJb#Nd9s+AlJ>e)Dp@-gG|KC@1VfAb(S|UDNDC@CYZtfwUj~d^GeqFWO^ZXZi2j zqt)Jkw5uFub2OwbG7^95)of?ej+4Px{iFd@`(%J5b^>Q`<% zm5&%SA@6vy+yn!P7wm_JRae-{XnGe*Rho}RsL=dNp-RfI-5v`Z&b~zYL2SW)vuCRw zQesnCyuJKq>{cm#p~0=09BgrG-D5_6rT$^`X6(kgiguySEjNpL;UV?CZU6@O`N$Y& z$D{nlHrCasec5GC#h(S-@Zn!q(7Tn3KPtPAws3@?m+8VE+dki?yV97=2RIk~2vH}} z5PTnNymPUejY`#q8sM!@cz_4qGM<=@gBRBuWl3)I^FYv|4(oCgK~>w!8(6}PM4-Ma zwzGY7Y5t$Hzab%<&O5I-wH3(z#h2!znqD|riikjS!}I)|U27NYvzYerCC z5(+P9&4H!lADU;w6MD=T$As;O`s+72ZZN(p-X1(CutWCs(7$oMRn)T7~J^TkAK^o?wK ziZQ(yRnz@s^--=3-snH79qnXNXL!_W_& zoqQ*UWf)yoO*|N>6IcHCdN5u$Vsvm$&NOm8yOsy-yo0yOWZ*Yrwf$7@V#6)LvBC3O zTn8`Wr2nP_4S2+u1IKkV=WBu)Xg~6-r5bz#|CAZ^laAyJG?;Uk>|0g=PB>437H9vFI6^ky@*=2HdMIFT+5;8T!kM0){4{*Z<#6gB4NRy`c zy()Mz!rJ2X4M^Bvhq(nqpnBY|*q?FUHbKMlj~hSkD_)8PvvzaD?)`_2eZPGk#~8z@ITAhHfMfm*r(qhGz_$jcFU`#&tDOY`TO+ zt5wkr4^0$_YyJ9_cKBtFi@mt33GqL>Xqkpg*rBE-keD26bn-zLs-mFv*(yt=n9zli 
zXg)k1SkqLFj>suW=t+8n0Mw#4mBwhzPi;r7Kt@$sA@I{KQ&2f5h?6ag84@g$Poz{G zOIwPOh!GbP7nt@isdlF}#r*X4puO-n2PyrB(O27ra!EoO$1qNwmSaL_7zQpc4>xe9 z1VNlMzYjxC_DtP|+7aQiLt7 zsOi^7wUql{E?Pk4$0QM_W3F&m+M**Se*QoP|4Lvho=46nFjacVSe4K_oo-B;Ka;Kk;BiZRyYoshkv+T+Qs+U$hvh!VUZ z{8ARJ55phv!N%*bM4;QX{MZLq#xLyhYN*s_1)84HIJ_lc689XuF!)~*KMcM1MHoFkSj8ORnEXa{ROAzW9KYEXY^E#?B4U_u)!e(2?srq zaZ2&nvQ&<(;kgk3=R*Np>S3u5z@Ff@I?pjrN+0y~_^1AJsdO_k0kuR6gl(?^2ATKh zMZsyCP>dL5Dz>i8M+KB29x;x=&oUo8z6DS8lzV9oFkdOA!pWv=WDF_dkH0&G!Jx+} zLeN(~Tv5vIS*L^hHAu;MYt~XPMdnlX%X`JFpaNPZ6qPncl1p`Okz5rQt;*Wr#Q9HF zk=ARCIo3v|rDYKOnX@S~%#b^Udq~F?E8=M^2uD1df;#v+GNRdijHFF4LJy38ZhTK9 zleR2@_??gvJ5<(~es@gwRNR!vcBQ;Y6bFQFS%5$HGW$$xul2neYzUG+$ zr{hJ_9V(&F@?fdKQ4S3hXEiOVtr=JRl#HOk@7N!U&6(eh4~l=uvD_bWXW!y>Apm6b zO!1Hc2Msmo9xJZpxS71Ef=>B{&NzJuU(v9@>JFloJNQFo@CH5>zHG2*iMNtKS@jQa zQ%Md^0Gp0N{cl8we){7~myT(TX2hBHfC?fAsF*^TwOlnwGTZuVYNiA<(kf2d-}12X zA;s964B4(=zhVb?sdrYsscr0VU5$kfSrxg!l82;+M5kn@x!d=M74Z5x&d5%>Wum+TBN~HaE*C}U-}&OkQYft9o`l=` zTO65mwYS4YRXZxrJ!ePk(oMgBvZ&`P*p+ocFJdd|I7w9=qRxt2o7r!rmbLjN9GYUs z0spb)^jgEzZ9Z3*Hb?!bWqiF6A@0$(V6&rUH(&Wp6$r`-70ukPXr*3AQww{VUY;Koo1E!i6B5=Za9#W8-dPeMZX=Zroc&CRSiISF2VLCDDBQV` ziX||0T??;6w{9Gy#kGz7WBPrj@vrvw%)*=nhyiwK8Ri`@jj0pFMc7){4&w!JK?v7Y z@XPX`op&Ri8Lqb+>ra6z>yN~;9IJgD-yydqkgPo!s^@tNh0J#H5xDurA>gx z-)dUJ!xR4mo^;jtrZEMXl zfm-By1Td~*hXb?TmFZ_eYDB}z4-SCfA3sK z%(7c6qvzRscw55$zB=p02e|3=-LgVZQ#ZqZO&+AV(qMKHz3a7~obGipX9;#L7Y_HS z4|)lNfdrcfsVLU))MfDcq#&r9c}Y*_D%IRnZ>G=^Vh=+ih(L|7eEQbyO@eGrrfS|I z+|yV~D!1M%=kYY2a>}B(wb)Y@{?6Jfh+n2ztTII5%r+kFN#8Ws zAmOsZ7~uZjI0T<}j1d#zx!Xs+N^Jqr@r~A38pPn(1$H60tT&52-p@*ybn1a{HQeJg1#6Dh zgIDK1!88c-S8WH~6_N3vc|X0r%Tm_pfsQmu3EPe~oo`weryAZkXtPDU+-*}k)#`cM z9T`zmi_c=Qh`otuKU$X<&ACJU6~-M~7S!pShec4vM_q%qPk$0faRZP_*kaCSi*2I= z&#^A&%?5D8&kW@o|JmvB7%mR&-M)eEMEisx4&7M-CtM%kf^3_fN}j*MP-R#7JF zxL$Vzc&sm0!F2(MKJy|`A>_GIeGUBh&6iT=qF5{Zevzq08K^HmyU{^{6d&-ELMNJ{ z{!)ujwIdGv%%5;_JEIC0lY;#;?t1TbjxN!Pu;$g(k5!S~J_XIi(}O4H$M7!?`-Q=t zRyA}LOjf@O#y&LXht&#a4AnS`G_z7F+$Z=i;)S+N|YeZl#vm0Y-BhVL4AZ^jAfbh 
z@on9E03I?hyv!-?-q-O?M;{Z^+2O?Nacc0(L^%9GB3?t)vi#{)s!R$p%vjCzQC%>W z>{~|H?P1thx)n@Ols#88*!Anu0lyhATi4mLrlg`qp+)TM~2j|m6Z$UQND zcu z2bDn=TeJ~D>VgX9&IE7yx&-LdU_ZMS)kq@3==k_BQSQhtyv40e@(_(=Sy20p3E_Gw zQzg;f4wQ538@Ltd{=-vH$b)O>2rA$3gPR7-#qpCU5+(+Ly^1dIcxvboCx35OszWS9 zvY<`P|D~!#5K(1&0B)Y3cto0=Y3m1bEo;X(V3iH47Kiu4r4tvqWKHZAcxe_z_LI)C zpU%<|4&x=3y8p%mYVKRaht{HkE?w=#T>~pAYSZ?J7Q{xI#taS?4j1yM2ktTjb^O_8 z__QehtxplXzO)R#`-TxWg)!>0rz23*8dtkGi#Jg(&W&36i46Qa!Ty+^DX5pp@93|s zdB(vjS(U+S3++k!yxkfWk;82n|LAj(d!9>XzWZWNI~W%kIhnW<+OQgvE$pdv{qP6Q z5$ALJ{BWx6By2GX!XaySJ5W1bX;gM~A{2SSd+8m|`>*3A?e8z*X-u}MmRHF#uG_5RNoaE=*>{Yzr-#W0pJ&D-j zkowO4m=e?+oq9Ub71`Km)nv__%r8s258gW^PIdIlSMk2p*RR7nO`Qh+h#Xw?inSYbd=F%j_a;eqx4x!o|3+Yz!Z-GyW9 z?09k;v{`9Br<;ZF?iJKOsUFBzr+xpTc^}n7(TCg|AFY5WxOetc+7p0cFPCTRQ0#id7Ux7FpW;bapn_kw&sgm=J$rjNNUSdF(S1u>K_C!G(;$RsXloYg;}>h{zZE#2##P70fYy)% zX9F)E`Nv*0;e7}}k3K2|ZXtrvZpz9g`AsA!v6J!xUQjbQ78_Fu!&_#%(HFjEta~j= zl+JXgwl9-?E=XIPyCCK@@#G{Z!fu`g6{;E#uKphbK_RQ>^}pbE~V&h*$qzF@;_ zFf3%)j5*zW94Q-Lq@)NM&3HRI@V4{bOW@4Ejc5}2L1+P%ZL@nWeo?zswmg@}Q-=p1 z&yF9=rlyZNR0zk{+GXgb)=WX}$HFHRGFA*a|G@mP#{~9Q3p$s%?d%}qm{<=KZJnoK zGb==i0R(<-7+e4|oBJE^V2t6Xp*N%ahRgb4hm?A21GeB~@+8052}hyOx#8;J?~MA( zu_k*QpFq&*N%F0r>Iyx)+9--BxINKaZn5(tafIoLrP<9rf3^F;<>K#KGCN~9qDK`2 z>oYzY-$YLi5Xtra_5>fb51cN?;%IJ2(q7NvoQe?!Ub_};ZrqdNp9@L> zf95lLZv5JP`j4lXac6$6HKt>&UR$MT1Yj3*8O8KSRXlOb^(ggC5i`1$hXfh4QyRB( z%Ahrb{HZ^-pyg(-d~-+sy1(1;B!2CaZB_gk@N-ElQjR+EzcC(2zLUdCl!`vBThCPN zn6dhTK0i}pis!B8*U96u+b2tM88fb~CAN)_rm9`Vd8}3_uQMA%)c-uF=DF0<=@=Lx z)PVA8bnZSkXwKyaL&Anrx+w+%Xx>+P^q>$#OsFdmvYj{AznR&BSnze8= z2+FjIQK7czhs-4!^{=7An$l%I;=neyG^`ny<>ga|eT1vf=~U*m{(EU8e`^{x7vgEq zZ$TGc^w{*;v>!HpG7{5GhplTEEICsKp)$C}MKbRd1Qfg!R21a0 z-$KCt80Hv!=IgIPWol_%>66RvjWG<9t++>LXc|?@~KPb-<zIn!xejHqr9wrY7vIO>M8>K89quUE8&jfvYw_0_O@-BdQK?r~UN~na zcgWOkc0dEOwjI$RBA+>;KLRJ(dsd{^B{wgx^zq%7aXJc;my8I3sEN{777h?^h9`V> z*YfnzDa>j0t=>Kb{p%D8=IdTJR;UF7&AvdzY9GT+!|12`i7}eURPVtg=+-vMkU#*J zcCRww2Kb6Y>SGzRMo=e=0*et-2XSI5%@ 
zG>eeL_?uW9G_5&`O!7XQ!n&=)8R+%-VJ~n{b;pg!_KOdAQDHC0cX@0NTN^YxX_%h7 zhyU#GeeGE}tOEp9c~x26g_n#@_aABFNh}Uh@k>nhNkJ|bSQ@5TNdt*9-fuiIx&+<{ zD<@z}kI`>(vayTmUy=??s^PH4E026M_$*|mK0W&@nbnFSH?n#$)U2M<)LEfTa{H_V z6e=Ap{gKQ{Aq<>A!W#c`I!%I~rw*BwuZzD;PSc=(y#tH2fwm@ii*C+2Qqqb!93w{4 z@c30sj6$y}xe5gdRrKHYOZ*q<46F^^=GI;MUtC^nQSSD13BV1@Om7*G8zw6F#Nqo+ zV^6Xs3Uf?zH7(Tka@Ikgt{s;bH&iyOdQ=~leV4jXd!@l@EBgD(;SZ1H2d!3i^Ud*b z?qj!ecq_Gz%s(6l{n{-}I8#nBjS4%q7o#;`c9!Y7gh$Pq#csmwTCHf{hC`zIq;rsF z<(1@tHhm zRthC!Ul|k{ZkgWbJL_|MFsIy!-F1xSfKP+Fnph*wU*;tLxr_~F9`Xt?ji z5F?-%#HJkiZ4TghW*EXOLyZf2rG%lJBS5RolK5E!BIh2wo~w2}&8$@(2~bCD{_dBm z7a#6h{aXf{xmgcC^2!3B@3C8bbuTH^{Z;`jp-+!h_-}@4Dn5Y3t;raDUnYkHJ&G2IJf?Dx?4OUU@;&hb{H z0iRH8xl$tI3ESUYl)=Bv(DhwsJKvisKa#w=kaBa6_C#Y@R~^6RXADi7;W&@)VvF61VRK2NQA{cBXCPI~w}Y2d3MR znvu@!cZY-mFBGHoSb4>d!e!Ba9Tj&%e)Z%sk+OfscDH{>h^uFQQrJAL3;i08zY2G5 z6jW`OpDU$Ld%LrJd!lKla=M7O;P>>ktVj93ED&}M^odVtZ$f8TAO95ae1F@QvVMB1 z`mX{~Kh9+Ko%Z3wc&Btm1g8LA|hg1%cXFT*zSi*T< z@ZLZAa51q_UHCJ&PnhPy^zW5 zMiKgjRbW>T>MAO3NeIEo4W!LjUIcXZiUGB*|NSWmbj`fdBzJ9;P{F;brW^aE9KWdu zCHSbPRax=&x+0EdAsoaB07PO?TKE#b;&3iDi>><11-9Ooyj$QUIG(}>QZ|!VM|Eua zHV=<)&o-uiyY5{KjX%^WcP^hLc37`%WiE6)9h#s^Da*e|nXhUHCID8^ZTnc7u~Gw| zh?i;NB7vnq&ftn`XDzARI@e?6360JMFW2$~qs{jX2JwQeouovB%9*IB*Fegs{M|<% z*qo|uMSXSK5ub}!sm~W4jy6Y=x07mZ38o<>sD=oHd!99mh;KvLB-(Tg{vw9KfLGkG zQCVPE*IY;h%RC%F8b#<0D9 zWyr{uWevcjTj-1l0ZQjNINUYX{SP`KN9JvJTOnRv!Q?V`?p;==KfE;HL;49mrgsm? 
z)sVZ|>6?+rrDu{YxNl%uJ2j2N3u9!eAS^O$ghE^&j!NZihZxKl-yJ$o4C(7tHknL| zn1l=pxRHsx{)38^@3;ZFBIkPZM-*vlz8~PDx900Q;_)19#})bcO%_y<>RsK)`0$I@ zQSZe37(Z$1v<%=mKB2ot{iP|6SBG!AcN}HGZe+zQmT{f>wX(x%YJu{`{Uk()ZMnPD zpI)O}i!dWKO`-`($gEKobddth_eE*0)WIR%$&yHyNmFoAXnKsmBtDP081*zz8hZsn z(=n*WgbHZN*6ZI@Ls0qp>LEB!=ZYo6@i&xmZrU`{jAg5+Y26fumckoK-msQo5tIEL_y_c(&@*&k(rZTAo^&5=4t@ilt7Mu^$d|7pUzHMia zHy)4opMsJV^mi(q&8bM}u%)2e0Ar*D&nrpG9!qZ-fiab7Y)0j4Cm_OeQT|YKn~>z6 z)+m@Ug;b!lle46zY}Z&mK^0!XcW7~+cabibNPolHcX&N=J{f_Fxs765ZOMqhAH-`< zu!T1`@q_ntqeqSdDRv~K*RvvXd%h?Ab~BTsBA{6zDWF2}?jhiXw4=ovrLoGeYgfM5 z`QD3mJVz+Hwm4z4Ad6b?TSOL6f*T&`YIe1BKAy!Oq8OI_;9vRZkQB0-<%ZG`aXCYb z%^gtFvwi%LVptKD6pT$&(j%Z1WNoC?7G5Bxmlot(nwy*a{^m6?QSm#qF&1~O?Lxmc zgdS@k-)N(uM-5ANc8RUeAxo*Iw9una9wAxuO>K1-GCB1Vz`Egm%KrX-kfoJ%wcJ}Cbg~^9529#VZ_+E3n?kw-HDowDc zSONU|!W-|{Pa~2VHbvoi@ycTo!K*Ryin4YtP=o=OSOR*1csV(l(BN&aP?>%@o{UKW zTmlzt(wNA2X`L@TlD+fODB$kujP813oBUN1GSzFf?k zKc4vk`Pn+V!6`66kO?9TQvLe11#upL(^^^my}R6JY5T>TdboPNJwOiz<2yJwVA`$J z+eR=bW>_^Dkjtok_*B1L#Wsrb2mtjVXB*v~nkWHh-DpBo@iEu8uAEFv7|qRq_X9M3 zzcT$opbcC{zVu2GgNeHfi7z$Fv^1@RzYOBz$d7Rd+Wl$KloHPs^?_?M`II`CELt%# z7QgmelmU%_XCDn}s=P1vtl6F7V~lg&vj9P!JCHPy#ouqUVF-9^GWLK^Ag0Riq0xm7CSEgc&!B>>5l1q*=YL@{-`&lLV9t1aR$7p3jPf8v{HcUdV<9{=5*imRNU#+DML1mvEO3Eeq_y>jjfG!t(Sf~P*? 
z%9#v{QPPqIFtOZc2ApQ=~qgJd|M~yju~tr==-0 zt9LWa)o=J-KGIe%j^?2IxmUk(WM^o;H}}0K5)3=wN#2ALtCOZHYc&2J1~fhO-e}ht z15|4q>^X4X$#c~J*|gl4l9j)|6YO`i22wLI;&XAdn13$&@fbl4jg3;jUeLZ%``=R0 z58)uar_ttKUwG~1@;TkluhIcyvIYIszlWxZvdie7LiN^XbD^-+}t zQKylg4RAEG_qwUX3E2OdG2;dLz)3u0)-jH=F;&KgdhHZkJbV3(&mS3<69WWC(wE_317q-C>;XJ){T_kBj1t<(S9n_Hjk#yEkO+Nk?rv;?D5s{E?>6B19 zM)wE>C8eZcbW0-=A~kY!_ePCQ0i{8@yME8-`}>!};T#U^+1-8L`?^w&)^+yy0HQ4+ z3Ek1E?mKA_{>Mmr<$6Rd^Y``A)wp9NEqBPpr^u>oj@75+_Iv;2JJ-#^er*@adbIV8 zNEQ`5lUEJ0pQK3?wwcZ2UEMwhouKuFA<@YZc&hC_dpeR*!@y)s*oo5qpLT*wZO26J zG}3wsF@9Ph3=fBSj=saJnCcN{@KY8g%xgwawEEHq;+zvaf57;{=-yT_AbhFGV<6fr z6qzwI@QuGvnVZ}sypBD*(JXwNR~jBaX5L?_4}!Ju9E6 za=9~tuk0ja*@_9e=kulbm&$|@w+($~$+wm#CI!{*ziT6xZh6)`I}#vLlULopRS42= z!bcQ;(vjNWTlZ4DJY7L6Hxx&o%KeH>D1D2KkU0_ZW0eju4_=?}IcEQI#)!Y2cXCW6 zMUAekCVSM~JK>rrW889`_tC;xXlFeR;r3~Ijy+aM_b3>6S$_!T%Y zcTM>pWxT==>RgV8Yf(k*&+?pFc}f(r$KO z(Sw@GJ*HJc8a~skcb{|^!j8!y2UeqNg(}!=kt|d!O}6INTkYoYWIZO~De-1(4Gg8) z6nycl_hzjdHbb9o8hm*Px)mHUWHdjpw0h!8|!*HmUoi&PZ9JXCa!pyk5`Rh70uTTx3P6 z2n3<`n01Q*!`0_LMoRVqlON=6<+04hmmsq43bZ-f$iJOL#J={-;x_plnd^m5R43%j z5B)~t8RJI1QYK&+dAlA3TiB3f$#p$flBL(vOIVmnisNWPVyag5O$IqOWoZ;Xp2+cB zm*bYM2(DH5UlFk&;S`~{Q2*h1Z(gOXE|)PpSM?cTf5oF8RnXPMv1R4?OU=H1`G;>2 zS=X^st^;Ev9FRiZ@P}EV`A=i6LsIgedk#wc&&Z`Y4edI>VQ?fH_krF;+uLB*zQf5b zZ&Euh%hHj)p+3@Y<77QrWQ?Z4MmG!#Idk*<-#4@fXw2R=S7Uk~SY^u4k8 zo4EYFa%C+{P}Fw|LxWBRQlNaQm6#r{27Ti$;Bq?sj!wdrE`oqOubPkW25FI<9ItSb z36xgf-?}i&AO;7#`xF%!>v6=*v#x{C-Gu|p0D}oUn_Y?#S!<3|-~`H1^z&NN!hFIH zZt=(FdZ!jQ1Sh)6LNt1B8;%XUO#wGT6rk>k?T71y{f&$()1@AwhNtTf0>PDM=01n# zv9ePv&++6U;O_mdr-mH?a=}cL!;KA~xGGyY0=F!ko{ec6K5))yY+r zO&zel3A!hJ`%yNYk(QAe_X-+2m02rF%CEzwAA3PMN&QV6pZ{HHqAAvaT8+l^O>8^>OgQNQT}Ng1}?uJm+`B`Iwa57FT}0N~e;r^;#kgTeT-Wy-eyKdznY-@b^>qQ`&Q3s$r4MD9p--14ZL#?KA9(TU4H~L4zczqDUuh9`tZ9N`^ zI<5fQABtOqmY%pVc0p4V)9f_DNiC$ zSs;9L{uj#ij^eXKJ@3l5?2ebNw?p(?i{Im;7(W|)Kz4k9AUoWo_T$tugxuNe%H#XO zBPDHen?fW4Z_66E|977Z-E;z0tM=pGlmmA^qRXufvWVIuV(E^n9MCEyMYxS8vR_q& z^U3b9IORw5fR?jl?JYph&UZnL4Q?`w?b$`pn 
zo(a(2bZJYF)Ttpv+we_oMP0YREit+tr5;*@J@5v1KD`H0a9%x-Zx^Lq8ftYM_dP%y zY9hKEs!&ob_oxh={4W_x(Guus9(twPP+be{r9_vlMP4*72`Nbcg~g7uk8n1V5^L< zakX`Y{ydc;v0wnWp?+g^ensk;8>lZPjAt?vTqZ zR3^Z?h`n7ECmMox1gx@KKM-pivy_295M`clemtWx-wJd#rTZQ|sw|J7_Auv*T0lo( zLY%XLGkB$CCp&&m+F_oOzIiF*BUrOG`-0O`xMp3SS@5gaWI@`nciPL17|r9GXoG62 zzsd7;E3W}y8t!FPAR&{9H2iVoztF!|wdIze_eCbZx61|!#@9MH-&LAr$KD%8uU>mb z%`WMkH(&2sRhI5v>ky18W+5|W#aSF)e`BU(dTq^5(nHCt!DYPFv{mbMGFdiexjQxX z;(C>BRoqRWGRwbRp9CCr-} zi_#WS=-}84jfX>T)X>^4NUQX{;>~s^n!{{K^eFvztKC;J<;JHpKQBqU1je|?Yw(__ z!}oB*%5md9YI7#hwbQ)T!UKn<60?5XG^h z=E**aw9E*~0mm-2<*oP4=a=T8j20Put{-O{iZ=aI15-^_CNwa1$Q!!xIMmP;*SPQ14p z#lSy6{aLzmC&IwXX=ShKaAh>T(dFS5lr8R07eToBl9!D3+X8AHb>Uv4g*|fu zXa+RFPI9aY%E}xXPL}ilm z=Ie?>(s6BP1)HO|LHylS*UXSvt*JC;6Eihi7>?LTfltv*m1f*Y1Wr2rR@U_t>A0cmrc#9gzu?tk%>jRToA5?JfYdhm zosendibI%d(%ovm+5BnN{ApXoGO$$ZqPP`cw(vvDpNv|}Naekzf;EkmYjh!jM*1Gt zD;pcf54(D>zF4kyWlOsLHaMQbXVmp;A!sx{yS`z0KiS)1eu3bAvM>a(tI|h03fc%h zOvDLxg1YRlrGzzpr9;KHDH!b{`}ofV^~u6-`}5`vOM2`IxEc-z8Cfpt`JWl?W5-?? 
zA4Wu!)QIXF+W#5~NUERqQhQNyw~iLuV>qhH9`aGiOwH1?oTek4m~R=AYOi@FeP7{_ zg#lkk$ikHGE}5@po^Fw}D{Y{d)4e(vwzXpGKl~~0_2vQ)^#uzihbEteEozaq zRMj-#Wvs5p>4k|-OfNA{GxWU&vS4fD6T<_?e&M=B%I`jv{ti#43^!-Jw>42&Ik^*8 zx_GfhbJ>(*VOLE>vxd{fK2BTxS0-t_*SR3U@$e5iK&lxxYDkB8mk{keh)8DOd>b-(@>D zqQJAcTuj6h^W772hWBJ*OBAE6L#~@Q*!i)vjAeT8K3mH*+jXCWVqdKre0J$Du(Q(G zXu0&v;sg8c)oj0|nKuu~^Re}{xT=%6qes;TKxOIZ2Sfl-E#0vB!ePzr%7;?AUUFbhNM=C<}2f2~5KDXj3hT+Y3d$76z%BwPlYy+;REPF3BYVO_o5X?}c$y8dw2xPG(E(9=tZX|*eAA^DicnDc`s@?(J+ zzgUw@_}4E%PhHRWkY(KWSW6L1JotPI?bNK-l0i5f75t;Em<%|F!Tm}XJsV61{G*N3 z2;Vn|U)|h%9v@76W^puQ_rfTPeO5}NMk)9z6D$+HQmvX3GD{sPBrZN2ADGcfw>vO( z6-uK$6X|2mrJy=k#8vo{)e|Voe0|BxUg^9^ZujA1$lC7lzfFvU9F61s!6gLND?yw3 z1b6?Y3uuHiy~9uY?;I`%Yrk$1sMi(@^7(s>m!IVZcz_Dki1z2I(qap++I;79K&L^6 z!V|>>q_>_WtOsS?1releg)+<1GliAO^4qJxaWTV00b}rnFC6Mw%F<6mwWI%hz0JX@ z#zG+su8HNZq zfY&6Sc??oz!QmI3VaUG&LE@Sd{C$%=Q_zVqzB+xQJp+N$BMOjTCkOKD34lk`lLK;I??PmXR{MxFRNYW$_wzE~1WzD#>5^_6Av zCyr?Pi`Q;AB+O|q%(tj$hpM}u!>kS>ydMq>_%}1RiA8P2O_d}gc>)T^sIIW-@aMEi zSET|tZV4GV6_1k#OPs>aC+aN6epb z#Bch8XWsekWrPdOUkm=oGS+#V$;CW6eV81k#uxWgyeQ(ccp;BMx@FEWSLIFR;ghNi2B)lKp~DUDXHsT`9H;?U`msNMQL2hsfZGeaeYrDz=b zTvgRgCfjM=e#S3mc>~SJ^N?j_Vm`fT)VhYMFC2sTXF9UJi+&aUWg zz!jgwCUAaSG>XGojJ4MPpU!Wv9y{sZ%DULko6WYRYl6adKeDUzEs-7M9SbNDDeKk) zHu4(rW3sc}^nhGK;N_NE6~Kj#s^mY)9|)`DJERAtBzipgfe;8%ox`ND-oJ{t3Dn^W z8{}y)QlS{l5W5~nsCU8Th{8sW7&H;|f!lvC=bJyJzwF^hQ3t3aHSYIZK_i3k3s%0g z=91eC_i9AtI(dw4BvOnhe5h$tJW5s{k+EV)*Sd9+>JD`!Z*#b|e~w@LF4@i)ft~tJxmzXKN*mK+B_lP8KCj=7cUw>?x0zPZM|hpr0Arx*#C>k z`V2M=HiJgux7mq0OffR?=L1XOYr1NyHP>J&2YKtoFLY~y)>qc$zpHE5BY@=9m|-2( zo!8*kA*yfFHvVwUTL+0(9>SavuI@`61_bM!PY<0Z#Z{es>5Y}EnmRj+GJ!Ewoi{Yw zJU^6I^G$8+S(yY`D10&pMSXxN6*@7<(vsT7CbfsFG`9qPSaYck8^MiH6mIxg&DsV- zFn_OR@t!ac?lSUwmCLd5Dhv)OE;4Joey8;T=`xP-Q;%YCwL~}pwgm22T{@@Q;OncK zW~S1&%(e-O(Xd)v)0)XzHjD=rxz$qPnR=AN^;D<=?wIzh5l}|VZZ|z5B$y<%OYb3^ zLzeYkST-ZS2%o)3CnGN%NHNXVfiU@*n$W-}OlgHoMfm0V$zoI&wW$4c?oMAJNMX}_ zmw@nC8E#W@I8xhB^o?BVZ2xJp%jy?tCsXNs%R=mXEv9$6)rz&1GuR*L)GBeJs0oZ$ zpM=&6TZG 
zQYYeiwY|!X!RTIuKjyU8f~0kiy2!$qD!P`$<-fu{R-00!tsigiy-D5Uzex|v9{~G z$k-_KGipXjl>tX|OMbMqmxiCpqE*qr1sp}hBD5=?O!)a4fVL*S#AoVYSOt&1tt`(P8y1l-I6clTwMAi~xt|oZ%us7)(#_@?e%?gF7@xO@XQ4tb zCDn5kQS^_<@5ZICD+4fM)tBK)G9p|JTXr{GI<#ehQ@PD);w>6>6-fIZ)fs#GlIDVx z5snSqG`TyuF4?}2qgnH-DyQ_YUv~LDY5A#|*`mSw`=8z_D}w>F<$S$kdU)(3cUafI z=Ixy1&gCP5fFrvZC`h+Nt!b&z*#e{tydp@Ziqv~+$E3?{bbzQ~#Z3$~L)Oq{QqGC2=oa0FYA;k$Z~}`ws};!tash|E8SqQqx2t7JK_H67z*NI7j=8oi2Tui z_-Z*Y0^PR}*s^8lV_t7R{;$wSzaIUu`+@JHA+MSpl|w@OvByV(PE_@kBOON62L~|3 zQ6cuq0oYBT4rmcBN;r&RLx_Wowe0bgAMz~R)o2$e65{_wmi8{LCuy4N7mc8xXj_+@%EIpgD{i2PZcm0d)DCJGg8n>;H}dF4Nr z9x0N;rms?@+o`q|3Q^I_j{5lw&`!Q$yL@E7+5^irX8l_n#Nqi-2G*L}6_-9ThrsKF zI!~+RCY88cWX&*|XM2xPFNpt{>XwTuwa%mp|BP;uW+m((OdxQK>JMF9DTN)JT$iL} zh?Fxi5R@m(vDWv8%-zFmyOXgOPx*BkM|*BQG9Ul(zc`lJ#EIQopH41FsySeO1sPTN zNtFtRz%d5!M(C>YyqQW`)Z-1Krz1C2VhqDms%>Qe7l?O0Qc7a(W#-L$uMv4Fxsxo) zYOjkk(WX@p#3_I@-{h}+UxOD>`#O3bDMbX&Tc>>oGFE3LD?lExmMhyK?FS;w!={f@ zBNwEfw;GIeHNbgSVpw>s#_WL$+Is(*y~w3gb{xLyGQ9jMr_Vj%nblI{U+i-~lmfrS zwWt#w@249k!^>{bPiFTevFk=}{iM-xd}ZV(gN+0p6;%=*@pdK+-cJj>QY3-!D9ZPd z^g+1ZP6yv$FH2DF=?6v{{QXM4&Jaiu%IK#T>}v2d@lQyU>JPe74~sV%R$LB*A)xz% z;5Z>6-myZ=W5z=as0^cKuhLm=A}ZqZwN$oYx&>0j zMAsM$P%uA27tEsjLh<(6P+WbtnLWoO-yUT}u)~@j+$~~md`mG{BhyY?vLVPxnK|C@v1V2p>jj4A zs3*W%T=`thA>Nd_A@XGJtGlUAzdl9TLdIFLxb|I|^+?g!dlWE7EHDLXItuMd1GG>8 zEv3@eGz^65Ax`X5ymP0O%C5=vM z87}i7BfFfFAp?+E47k;9j7w(q8}8R{^Z8eK$q_;Y3?g$F(;`|8$h6b25frW=VO)UWV<^Yf z9oIt}!hrb}LlA*&E`(8U8$XA8NXAOr)aNo4kI05&QCNs{8QzHzLIa&mrK8HW3D<|3 z(JzX`^+y{-z1-H*0<2;;xnUXb zWcTfmM!1%Y?>ELy8~)~nwFK&c{5oXsT0ByjDm5M057zsiCwOk+GOy*NhPBEpA{oiN(^D z{pE%v3kyTBCdC>>KJ#*Zk99|&&m1msjFlC?Y~5H~=7ViH_RGaC!@Y5~xauhsODkFrPJ&oy;5encu4@t?8NN(OCHVFX064gAA*sWlCnqPja3TQI^~H96 zo?6h3Jr)3XLAMU^v9ZUG$ECiW%g$Ar0gKr4%NQxw5-8-~@N`Y`<-=Zr_#Sk~k=cDn z_517fgtGU*Ps=o{`#?NfVN%>2o0%&mW)$#@olB2}dHci!AR+$m_H}&b0&FOa1h)H} z+4)_sM$iZ>EH7Kg*x&o5JfdOtN(rjn$ryLs9ZJmSui_MC>Sh49Gb6xT$PI`7XBx>= z+%tDw-#y(r_if~Klb&65Zv)(ii$ncSkpu9!s*d)(BT_v)LA{HSs>fLOxsIMpP797# 
z9o+s82N6&D1NTQaCMp22;wfSH2s0LEja8lFQi0i&+2%o5(1Cf`_?g_g6@CH>!pBJ zmmAsn{x+jlNlN2R^|!1>!9tI(4Gq+27Vrt<{>7CRJbN);Od5{&>Gdq0!ZYEXYE*nO zD$R3$6hi!i$U%jL)rq8bK7Tc5Ns=YZWHjT!_Sp=c?F1~Zp6S&CKJKOhXe=Y`A}RPG z%kp2BG1U$LeDVJtP5k0bcriy9HARa!+OwH{_GU)W64bWa>7F2-8NNIdP}7BHh&?bn z6#b{gAlU0D>v+##tR(b@W~O&+ljH55vO#QW~5OeAl`;BdH&ab%>W)>_vI-GylDnq&x%wG4+)cd_2*%Zu?q25sA@qE;bB zs$|e2#Y)+)8*aaVc|tvvUCQms-tF%c%zneZqSaf9`$i`z;d# zCJ@)&k@g*B#%+W?Pol3U!Hn9|&S=BIv=%#Ytj~pn#t=Qi*>Gbz4ewuia`}_r>37DA ztLMJ78K&Del_2HZYKv|(|4Kv{=cSP6)*wn{#_Wia2X`Wv7r|`BEy`Huqd>U2 zKiVO<_^0!@H~u^IRM)C}@T#~22^NM-q*Dj;c)&PO9lc7}iDljXQOq-}+kzj9M0M_0 z%J#XkU#_0ZQ&ypTq1p(&M33tfe*t?#Dm<`U4`g-1IGj$P@X-I#7DgZ5;y1&A z3t$k+;)iCk`!dj*sVL@3b}H%2IF*GOK41HvFISKl)7r{&q2YSHTLI|#$6c>TH&D@$sJ9E-fHB5c($M2XlSd; ztO{{H^p^J7Qn_xir=37ZD~H@m)7cMzJolDxvoEC_dA&|V;(Vp^Hkt_Q`U}0Z7F=NR z$ZHG~vmi{F)QX28BKxKJHukT~SV#rH3K;9s;z-UE%z&D5Wa7YZzA;}0aQMb_86FP% z#kF{qp0}dC6JW^UEGK`0{K0_?SkpPnv(F4;Z3xtTq3bUWnny#p@dlrfGON`0O6x|c z=Fv7gYY_9r;}}7xDr2A#6^S#?3Z(%OJ&3ey@V174d=}E}Yg6gFRi+~vO_ULo?vlP; zqmMDXpn>QbpTSe0@2v+%?wZy9I-omYc4T)%Gyo2!HbvQ~5o&`UgDuznO(FE7m;s9V z2QGia;di#}7#pedR5S$ZtBc@2rvW%K@bl*=0ugvyd_4HEl!C7=G&lGF)v)Y$!P4MSKZcX2ez@$y*9cU(L{|C6aK3;%RJ|R#oETF3PqWAM)mQnL zV1u-umYGHE{sQeS9`aVn_a<3ACj21|=qWzAOa-M{tSNC5enYr1I~IP6`K6^iB1D-h zbNVDPgYMzgMKEdLH|DG*9DR2?1=BfD1Dji;M|s4$AvFr1hFcD(>-S$cqI|CZ5<)6E}lx6CU? 
zjiuT8y_(abI*b>@63HzhkzM~M;x^q8b8s^EbDP0*x+&?>!m*ZIjdblQ%buZKB~mYdv_?=;o;H^TaHXH>D1CoyJc*UGtafwU^cSdB1ZnOFc%uX1I!V z8PHdGXiQza4sastaO1f^e$BkK)7V#@?LDn<{Fqsp3|cm~&zemyFyg#T@@-)-)!fm% zt4HVw5rnrld#o@!9qhQtUPzo*Ioe3Et+bc=4B~S&$lTd2(S4U96x^EJ z!cP2cKC^x5zHLEGrK>&cQjdv=iK|a-{TRdBF+PseTh)oXyFVLSPf}*k=2Iow-`WZX zDsp8Zjqj3KPrdxq!*mMeQVT$(I+&C^rqy2`Hp3j>W3u#41JI8~6>>F8br4@_DgY_y z-5fqW>VC7FvMI{V`J}%Y!5A>k>%hl#2S5^zx4t{Cc;msaF^=6 z6Q8|l_N(qJ8aTo>?Ckgz7}DeQ7Y4`>w>NygZ!D2L9(V*imG+$FfgE*jrc8bnD+Dlc zIQ44vk9C!$0)Ep9Ya=vuYkBS zL56eQYD(R0rK=lFV_!TXDvSL}&n zv{mRXJ0HX53>EXzQAFTV%ke)m`VO9YBGD3_5TMatzKjl3**LCB&MOGED?$9#_W0gc zWd#o4D+bUivvPPfgFqhBz1eP0u`_qmMR~mR0cXBKZS;q!UoXWQN+5@yIe%JgbL`q{ zCG5KFY2$|i>PNMBh-$tadJQEQ1I}Vzm7!gz7ewQo9pS$`5B10&sH0o#)$tBZo`q7k zv}oe*l*Mo9Y&As;+-Qjj1iB`cWw%ZzUX;eUn&b2RZAC?VokdV3I8Ysd=ylO{*KTk= zpUj^B&A9pbW_C0)`uHH9ZQcJ0PnnkZlpRRj@*;L^D@r(}J$G8afS&J;)}C#z;u zH&Iaq>f^;)EJEzEaP-%#S*Z8M*A$^U|I z$=UV22T@YKs#FMA!`g_led`}S-+tzDe0Bx%J-`DLi1?+z{rN4StIImue617fEN{T` z+P^+@W76Ve1?awA_Lk4_80PWT(Iu=9>wgU&c7*7b_|UDsFTWiqG9bQzn@nCQPad(U z;AO`Bio{fjN%DvSdxOKTf5c>j*5-+2*&gx(@!ymaC&M$QoPZ0TYqgB<0A|*o{fYdz zv8{Ir+TQ!pTTD$@n(~CpmR~#>i^i`}=eoTuyDaOx7()mc6vih6QU3Tv{4BO0YVjeI zqrB>dvL9|~(8eC-1pHgwjHo=s7-aZxAW6i1d#OB?<3C>F=q0p2k171UULJ0Iq19o` ztw*FA<_Z8j!5UzG^cc<lR7_!_n*2{0c z|4+q0Bfok$_RJJMH}?;al(u-1{)zL5ZO?6)O~$PZ^A7RnhsLWneL!28QNsW9gKg+e zEpZ!*hO#UgpO5w4R&+bH=BV6d+WK{k{nSQ)cs{I7os{t-KS5Upc*3*;1K}5_$`vN~ z00W3IS&70qC}~7MLL3%``<0w5hEU4k(Hi(56w7Ri7}1(y8cuYvOftSV|JbT}*+2R_ zazWTc;Wa6VBhTvirsd=62YG8)q*N^t%$LIYazxjb^Nik6#a>=>C!G%fPp=p%*hf$+ z3Z1b{>Fv;3=Xpvh_;IGNp-?7opR93u=4vmKH0=5ja4EfXr*cQG#ej8K4yDgxrK!cm zt*<`)8N>CqrGwPKPO0rQtkKLzh>@;?g6KcuZpCxDG7{6Et5Rn%T*k8y2gd@GG<$){ zYjjAT-mBq&RXXhY()X-i<3?WnIFH>;~ME7k>w>Vr07i%m9G!&p%=6Ixa?Sr4Pt`rqI_8^*P3AXZ%7vq zae_06joD-G#(n_N3Igwr$6~#Dy8ddi*-7%Lm4iEudR^K|pSg;d3b-*VwV(B~lg*lT zQM7?Z6v{88CLn1dva^-U7p2E6Hu_C;pDnA=OsC%Tz9p63wjY`ouvYUgi-Sg! 
z;{hKtsPt+H`V|L|t{j$WEqXGfU)*`J(G|=|DQH#H!uapDqy$IlROPb!tEa5PGKtv- zfbRQ>D6X>Gy6fdz)=D#RTr?W=z)v6C322P3wv6gYx|*h&@uM@-4ZDBT8MIo3uAQ;s zc@T4Tlvw$I;J#rukqIjZctO+a^(NQMT}!b}Kf1=M9|8fQJo1dsAK9^60%mb7{OQui z=zCJnB7{;dE>zEbjQ8;>Eu`aOw&T~QPtfRIBU2udf`op%?^t7s4T^fbRAv&E117{H zTvZWndkgPQK2Hw={WEzDC)P-JKMe~A}5hHC3nzkXC=&Q0o*Qw%%rIg1^QQ!)Q6*aC!et~ z=3HUtzjO=?3>gBpxBy2$DhvsO!8%nyW4SnZ&qsKztLe$__7~xBOyFemq#5w~m*36l z?PgNV{W4(8!0z=e?}4vN#9Oq5Wtx!keL=8Zx3{1kl_$upX6{{AJ#ma$0LdXh(#U85 zBE@uJ?8ufbp{i;oO5w%}6M@mi&(GriCyn#j`9D--Ci;yc0&*z*1zSZ;XniAnfo=w_ zryz4zzIn`0h(|@s2_(8bfjywdaCD3Eo?+dG zF5&9`+U&b#^r3e%Jz8^j#6l%74oJK293t;f#pX-7X8-4&hd#5!$VfnR9C3OH& zZ3%%>6Avd*bdXs1!Dz;iMpa$&NA+>uyS<8r?bks2{eeV&wmsyS+xg|!yJwMq9&ku5 z#}wX|T;p5tG@xZ_mnD+6V%?lLCgnB}h`Bm;xy((p+JT=CUe&$_wDYx>>iZ^7K#V3k zs(sJSdhgT=cD=sMxBeTJA>@F9Cb`hn8mNnR#ue`7>-=J6nWW5 zsuvjfE3}3JwdjX{8H+)tQ){+;1s|Uw&I_GlarA=TgKLkqhx8s-e zOfyCgUhofg-)~mc!cQ*e#!@77Z2aeSA<90vcUy*ep423TnmwQqbw5jD5`}r*{Q7G` z!7kApKDVcruMSclVD3CWHv~!Q9n<73!lOl)ej1dtr1?Zshlsl!(P0khwX+&m3)>yZ zhryr?n}+hu%YE{{oM9Lt;CRIxL9ePF>l5dmkM2%btZYQ(B-4j>PBzqpz5G)V(DXvy z+ZS9EGI%%@4yKvU+(~rF={tNyeqN3lyilELz(5E*10EBd7#pWtQI4k%&1VVvjPfeuyjd~7%hW==u(w~GI2^`5jh~U~3 z7#0Oe#Oh@Fqo6#Uzk4gEw=cBlz__m&k0q(Aj&yzdB4Wtqqa(T$pTj)eLqNZ?xGY-!zn?!&NRgEhuX|5ttr*NaIOhv;0Xu!(9)lVD4 z0YmS$TZ^Hh4E?xMu1P*w0GjAW8JmqHo}52kW0mR?L|x!bkJgs>C~{O*--+OqkWpSg zfJw*wG$|#uY}a~=Q)3jV9iuwPKBTRtW%}u3q`JB?nu0DV5n$F}Ep}y~{1ym9=u1c$ ze8`4GW_UxC5h+1kxM8X#ae=s05+Q5f0H4t+44(E=G?s$c%8fE2!ie>;$B-9;IB8Yo zUAzHGU~-VAT)wu~0??H_QUlEU3<~70*LbY|xNKXF`l1gr8y0f>MW^{Y%5V1<1<=#g zr4B(LEKaTs_u2ZKJ?WZ-;R&n)xJUF}mMK!QCI__v3sMu0uM+ol?%FDFYw5Cg+hKP- zNHOl z*{aR{@Tx8Q(0bP0MQE`jxGIePGPCP%fD`qDJKg=gE;Nf@2d?m|Inc;_a z5{`72P4B-mbMZiRh3s{U|Je_KNIjnd0eWehj<)J1Q*Ym6TF-a`-fz<~eeRpZ>wcnt z+em)NFO5!r^x!HN{0f>fm-ArU7U|o6%v^@mvcS5}e_(LiLYvj?bdH-ptzKmzIX5k( zYX?)sV^U1q70`RR-!QN;5ZLG=)%@_mJG@jwO-;}Ccx)4WYpjg+jnfNmZhW9Y)gy7# z7CM;Bwr8K|lYkR)G|QVhOt;t-j83k0rpmV1==?My7WlNo_J2` z4eX8K1=va<0-NY*na^AvhGiT(*8a!qe#$=`145w%60U2`jn5c`WCcZLcl+}1ThYKu 
zq(21=0}Nhk=R&Qv1dG09Fy&@$|L#TqFjZtD6#~BT-L#*9bJ=#y8v>>NMotXG9Z;U3 zWNWrsNCenAB7dv68Sntcbm*P+Y4su1>I{j(5{)C(>O{w909C_+_$oYO3diXAA40HK z+;aCgK4WE>1l&domOR4$9Pw=PtFhKINvUq?F-_{o{{Bk@)p8Lwjo!1~XT&ti306*5 z-L1}M7z7TnFA`X>=?pCng;O4lEzegPCcSi08pa*Z)X!d z@!puxqmI_YmmjEj)=%9_nLD7?mgz%TjTu%Y=y1QgSu%I|AXRR>9SgwG+#8Uju}o1$ zkG=d5sU5ltuCaJ7NjHy0+IHAWb(NK|X23WW=NOTYdxZ$UNnrZ2*bmKq^+JQPfJ2wt zLAb_qq(6*AES7c!IR#FFfzz}}VhR;`Kw8mmNW@k18fa?fKNFd~9TsP><^GZfwi`JL|yNs9UKg*>zXx`PzY^K{bWKM`RRIYbZ%Ik*GuV6VuwJ zk?DYGUpVma5ff#cs6)qa#G(s?ZIVj9Sr==8|8xxd zM@$r(R{8hC>=^yXZA*cV451`n*Rz8^4-?(JjArfOdFf+X=O^^xe>)T={&1q0Nj{9j zhY5cy`A-Dov5DiI6S*t<8hCxl!~detii$n~b(Rr2Sfa0|t(Ohkt=Fz}P#23f>S}`2 z7zSKEX$J&W1BaRtzO*|mitm1K^0JAIHUNHTgT4En9B@I(^-$wzXB4_D?U8;@IQxrC zd-8egN8gL`lWZATNN!1Jlq&@IM0+3w8cDvMPF83pAavC~FB8j~V-FIi*Fqn$^Gj>+ znS~0u6?H_rj~smL*EIi{MDHuOr%3| zfep1(|A=LC*yeg09d>X{MvO6a>lYSc9HNiESN`7W#8J*5yeMA3i)&r5p6zA=S1RX!)EQ_a6cAMD8~u)2!&F?gV#7 zU*OO4!kd&*&#kYA_K{A*8%D$%TgIgVzh%Mkax;AZZYTjWj{0xb#Iuan48AO)3@tHJ zgmT!)bb^Q|%DV7;V@#e;R3R0mg#gwTUk8;b-M3rk3X_$s|M)w_fVFFD-tLLhuR#YK zZIoSrDpIzjht56*0r}QTUQSH0of9a}=&0ITB?r7yf#gKqZ6LjyrJJ^E6`l_$!aSkT zY?I>*u^60Kp9Ci0+A>-rFIQG$fGi22?#MjA&RHiL;3!we15qK}G5dt%S2nPJu?z~z zn`KF|RnHtxW3;$wSRxY_cHR-IyyKJpv{Ig5E%-C($7uD`Ik7QD&;ilZ$F(=Y{AvKK zrPV?=0lYoI9*dLZllwrD%_%wdP0aqi493*QRmJg))#6%R?D*b74IzSUA`<0QEnxp( z;O6AsJ6kj*_KeM)ywbt@pM(Kf!~gMg)=7MN?1Zc zIz_rux_3cYLZ!Pwx;x(I^PS(ke=@Vo&M-SXXV1Cs>zebA>*-V?P>+2b`yK@}f|)L~ zSyRl}tzo3D?Yza(tt7@h^MiM|t(cwc`5=Zt=WA5DR$0aM9gG<}!PSz#Aj8q7B3WbB zk&2Px97-fZ#jb4cr3(+f?B_R=@ijW!L#z0hP%(=q8~jvt@B@Ozj?kLnW7d8gm3I3E z!X?+-db@%n%@b@Xoyb?4Pqu>N&0{HGo`{oOVgS4(5USsoHyT=FLaT$Q;m#>D#q z{P{P?aow7NhKddd;Er~ke6tgZf@STr8fiR0telY=E8?pOCaI$5BVdKtamL3505e=e zqqm$Gt@SO%8zduCzWC8zGk?n;sYQ*+*gcdVO8u zQaV41+E%yJjuA=NuU3+ixFx2}O*_>q@tMIfVdt>=dLk$AlHdkNCQjq-_bmnJS0MV} z{JwmTEffZqcHJYPu1z4F5j@3Os+qr_{Vu=Ig`Fn@HyO%EDH`!g>rLXlB z=FvkzMJ-sDaX32boZo4H=}db#&X-SG9YdAhXz66WJ3_UBzw)?6j}8w7OtgBJf$M`< zqog#nvo$(opQkb`|P=OjSP>}U0#NT*rVp0 
z1wOmBq_BmnrB~spehoFWdYI`^ctYYVr+8vfE+D z^(lt%fZo3`Ri~Ur!%sR$a9|FwvX(OikiXGjx&(fB53cfUQ-7>{!hba+R=g77A)|bF z2Jo2Ijc(D6m0B*%q%r+eD{|J!!HU*2m;wphC@!bVkZC860QyI(VJ6rKDt?Pe4A;sh z`zuKDc^ZV9j61xMecm8M7gj{D$$+mM}K$aP}w zEv9?8o0coMkXD5g0<4uRmuygPzVUBG0o^3N%dFDM8M#`I$3`P zVSQr5{}3G$BNfIGl5jdaQQ~)4NN03m0P?8`(D#fb8Y_kih}Kfb*Q2SHPa_t^fwHW3 zJPF(`kB;ryF1zlp9*GK%itVJTO>E}LU$tuHwfaST%kb2XsBiMEE-NxKp4J3zdRksQ z)w=ed=hqBQUp5jpAEHFTumBVZi%osgS0Db$&y*dSJc{r%(jRzp?oP$|$jGX2H4 z;L{^vSF7XsAN8(}_p@zRC{>nHD4xq9G<>oW$iy)c1EJc~Xt2KS>eEddJ4Apd$uzGY z#e$(s)FZVenXD_5?fB3H!2+U-rps*F2JMRtp58as;jWt#r8fleFIgy9*;zq(F%%9( zK5pS=e5vPXv)wLu7JlbSV))db(rRSvf3Hv%}PZ0KXZ>tdIwn9k3r*vwpNmDDe-$Dz^XwS&b&5piS z+FS8^k{VX=8dPU-LPreCfo?#nPhN8_kJBWI)v)^03 zB1Rnv0f-loe^FroN`_T!MF>}g$6Ls?AvZ=Wr`PW*r#j?{C}S)t`Yjnm3?S6lFJCbN zhGV%}fY-tCdb`6CuGoB-cS{wQRhO~19!pEl7;p?&N%u{LsX#lRKK21A>aYAjk4A#{ z4d3sOqRi)v-kY#;73N*xL@hj6rYJ3J=epoo)0dS_ZFeAH+3Ai4Kg&`a;#o#i0PwI?~a%a>fz{u)ma*Tnww>0 z+pkuTI+}Rib9xD{ek~A{OkfbTPy`eIDPa-75lTXZwOG%Fnf6=iKeL`L-qZziSf!l{ zy(FXF1!`w6`1Px3Q2O}#8n;;uddQN=)JW-_od#^L914VoZ2BGcGXRf@qWS`3^SNL- zNus>GZB=yIn8wSLd&1!qJ5)H1xW=0xAyG|zSWH8a8@KF>okY8Gs&u4ihU*1B*VX+6 zLvAFc3VzR!Ss1Ndut{WNI^bMvi?gvr8c9`#nqyC{RW5EQtXo; z8~(;gA@`i&+u>B-(Q8emtIVRzR-Y%(2TSR2KCGD9+(ARp@!^{?&9D9=IV3mbly2T< zuNghgJxR&z0E33APqz-4-6jJ+7rBG@?eH*K$?G(CnEtw6YbZG$cYT2$Vin{21XFr&8XEZo{-GcEyF*-f3zsC zv8LY|kW>sa{g#pEI10x!Fde#T8*d}IBVr=rbq6yP~CGhM9U!2s90ONu`FKjqL z+mb2Q<1{@Y?A!I2btWa3RTF=O`lUPaT6FWJ$nqr>Og4rtDnYs{wE1&^Q=M9Y&WX3) zr086FLcQ;TxwJ*DG3730^3P2exfkBhzN8?3VPA=R&pgqzkl~)?i2}v?fCYVTCRDC= zqX~$T4RN4{ZF3=$>G~=f@B;8RAgJPy_)v9v?t)Z-Ys|CJUS|B*m~aK25j+$#L~FjV zOIi`AE*-z21k=L~u|By9AUZ2z{*6$P-&>nky8NE?Tlvj5N>`*ma}?jLntit(xYj(C z)RhS1^x{9YyLO$AbA3q>8j5eFUc7R+@I%-T1l9-&>I})lG{rJ6$I0P`pyvf&8Ka3s zHtOx;QGk$u-H8gY2yt5pzV7?^^X*e250)pVVGVS6_)TT0rG?mZrD3h(tO;jCe#o7} zXwt8GH}%L#J!xenB}QRk(ilA2{@e2g1A(M`UEZ|+deKMbH&F=>4-ffzH-c<#zyU}f zcE9=&pYyvpuyyY5?{A`&jp_M*;yKp-@NNizOrK2ZYB|)b0il;7MQQBJDo{@dZ4Vwu~zcc*sjAdH% z667-d0ar=djo|Vb9 
zoOZ7l76Djp?i}Kl3mF@&I?TO)iEnN;Eb-!ve*v>g$Q)*@F*3|1pSp&5EtTcGCjiWOO+#09Z8V4opMS8FEGk3wn%a-vOJ&<_hvgFcmn* zkXedsk(5vCvL8|uz-+Ircq2J8I3e#%u=yJ)9EIAdPE8tDMH*2}2<%y=CjI~&ZR_u@ zrZ=rZ3ePoDYm05zcfK`|^5&FG?dZ`!`l(;#x;BOpAp0#;ib!?LgsH_i+WUsGG`YS* zdz6@oR+}-|U5WCK&t08sM3*9=F#PCBUvHrg)Qd0%%&Ft>DN}A}_7%T+dFoSlcvs{2 z{^Kjihk8#uVtTnC(E~&BhqUgGOTd07P!Z}=oAhZp0R+?9K3k!d4X%rz*AtbA4A~e;E!0JHGl-ybEZ(4)(wz>!B1l!UyY=yK- zm6kWmz9TP<;!_N`@vtExA>NGQR#~i>i=90q19W0&Tk3}E^W7X?gdKy=|A^M_ z7TKM`1vmIznj|B-+K2CfrqQFsdO64A=@VDDk@ybdf!Q?^UdWR~|Bs%i3~73%wN>X= z!22cZZ0FK+kzf(V1JILRWLSJWR2)DM@7>1YHftP3@sgNEmL*5cHvQ98551+lxS)O*-9p7_m!w1k-v z99`jkb)p#P@V9S(oHpZ!Ujf^^j=9vZ`CdRlV+WhWNc~2b{O<00oPl|Jk@ClTo$df! z;fi=JJ#U4mU;3nEmd#BE<+4YRz^S+$EibY&w&U+g?U`irbGhI;}yhTsDta{V<^?iEY1DynMI`7*|#Yh zikM63q?Cn%p2S0hog*d{5}7hQe|=ewK}z!_k-v5%4+L)?LZUBC4IuL>iM@WLSBKvv zjPIJm1kZI!s{=5|^}uFR$Z!pJmn*=UwEqQ}<2HSs1Px9;_N@Sf5f#-Yrk5OF6m#yb zUIh}Ry%jGXR*hHGwEbp;{3nI^bBZzxzakGc3n(eFA9VZ_FmbI5RXs`ETF@8ZP%?eA}* zy571MV&`b!SyRb3V{uha80#ZSx^N2}IR3D5$l&}ub+)Y1=0%6+#PjQmjI^p-SVtF& z7$m<%b4SJ&mhz6QBG#X6P}U>2Q(k?Os6$FWngIJ0@k)Psxiv_k5F4DM6Kav>q(Isa z%EA(?4ukzBDarEGCtKk}i=TqF&-Pkxb98@ai1Akix<`pls^qkPGDYz{nLO2NQ)TSwap>q3P3|cDr-=0ui*^+!~si(;=a1;5TNR z5#`RjDDwaHr;kaVs;pjx1M|v1yG+Z6RdU6$tKf^@D#AJUcbEG>KI&}q=gPy}!(L_W zMLc^~z-4=BMCMvG*5}NThplwW>4%>fcr+8*XL>CEL&+RjsG@AF`nFijb$jJ>2Vkr` zo><;=JjnEpOAe@?XK9Nu?Z2|HAdK#%q}B(k7oQe3&glF9U)042h`K!d=y_4wV{s6p z4BH>eHHcbi1L;RTRO?KVleV1se(awMZ8~wyJKs?o0{}CB&hD}2fAEP)?|pa3M&VQ^ogw)VA>|GgZ&>un zk-18td$Nir6#w8`TT0jcbhMooTdQv^@;Kep>am$tpLduiEo%GZP;$uS+SDi(-I7y( zaS`#nXhme=C|8l?4i&{jH7mMzEncmKFwB?7Z{+t6EnL)#N~|6kO^WFrx(I-9qYb?3 z$ixtQgSsL@@oyUQDRKZuWvPcZdc+&R;Ye6Sf2Wfuz*7!fOsc252Ir`tsGm7NV{DdF zf~l?U>eBpzRV}&wg2h_+f?xcW++kvQ5X6-Snxzvx98`I}#nILxEyx_7N0N1@C`28@ zJhhTkT5cvllTDZ@rL(ixHp(>$tZNJ5EU|YFiU*eGUuGLImoBKLYj07t#vX89)Vk_f zJ*+P_&$hXUe^DS~<_Y?QR!|&#KxM<`Ea$V%YQy53dRk?0xz#s%3|?s$tqWZ;6{|ay zuE(!)8tpKWekKjNnHQ4c=RhjLxK3?$h(8UY#BT6nhnPAyhC!Jt_5hLta{uCfxOCgKgRwmzTWk% 
zW5`93?(g58psWk(NHX5M-2=j~?x((8S!eD$JS=54t#f(+Fb&_IQUNiMKAg*{k{yGwHO!jIwsfjS zlZ3)j%0xg+l*_u~sMKr3okkE)9bIH#rT!kY@|i;36&qZCA>S;p2D*>MPw^%BW%f7n z`40xTAv(vxC5->^%{?mT@IRILd2og9KeeEDh19-s+blajKRYE4b`At?GaZ~?6V@Lp z4j$xHTDH8wCJn`)Obp=oa>b*(Sko5E@M`H6>)Z9uCqh}&$qV@msz$B6<0q2Oh@V`C zxCf~|_wTh1b88F4gBE~`62IDAcIj%yPUN^wpp4Q@jTQmp216&y;+7QY-n&m~qz%X$ zXaYX`FDYVNp5O3z9CJtYpdtXC3cSd1E%+r#Y^7@EKC|b0JZny$%pP$#Ivm4BgBz)m zz_i9?!xJT|{MXm^dmxQ0k*-BJ+BPf$mfn6_;)=X(pV?SL4CdRh8vU3o7JeL7VHe@7 z)~9|cFjOM`1uOoVFYR=~W?7&_j0zBbHr@#mWL0dDP+B&)2s~5x+c}BW^y8_)*9E#BSY>Re+Wl)oX<$< z2dcS-+_US96y=HMEe3yBTJ+@)V#ZBfyHuonBm*+KVwmH$PCLbKx)w+Le!RxxIy z`kO^hJFV4;-;iMrDQMFo@~ih-j4~g4#Z1RM0f$#!wrjtJ;45!cWgR?gg$)X{Vaiyo zagvc_iGSF!nIjpi88bNVt8WwZqnr!eg~Hvf1^OWy1XPs8y{)gOcn3;D4ye_0h6K(ca%kQ=k! zecR!kQ1G{q@3cC2xb0`9dQU@W4Yix6+^Mlky@Pp%_(mI!j>Zp_ts{<_-e=M zCA9PFlHc}Tc;m}eK)$!Sv`U!y=8DLjSJ06a?7FJkuLj~}B-v#cH0Ir)Xs|zP|0i$% zwNe|Pb^_8|vN(qbvmRd*p`OVyPF6iuGanzH=pR3PKb=efX%}vkuK(s1KJsm(ot=mD z$cU%%{uNis`P;)EE7JtWIK;VMzC^qv=Y8M>=iS0cqhota%l^fYaye2sMh}$i`8_?{ zQa$An5_E5dw*PO-6Qbje3a8_%(jgs+A5o}hUIXFfcNB)*RJuM#Q(Hb#s*J7lc5fC@ zg-G1wR#V);Sv#AUNOa~(n>7K#(0>Q0q_mZn6vVsLCi=kG(kmR;qQB zVHSIo#%6T=TLSCGy3P_tsj>V4@P3(kbQ!r`mRWSo&S=T&E4V2*bXfeWn}YsvJ2V02 zl|R^9mZtx=eBqAlEWUnmuJ)W3oc#H8BN*gWroYDfzxm`F_69B2X}^sjE~9i`2PGZV zB92yqeUjafSD$WC$3`I|j_@NAaAO*xsAZ-2r$Y~{Vb*`I(q-(fkoDyy5D1mYM7K}> zJ((na%!YKjLI98ZcaZmHm_)|z0@1iSP2><$;lzt_)?bDdh!^>#tgdr2VDN|p|n7|2jz*^o2S`Tv8|?@vH0ZIfwHBB;?q`)?sS6$H~vm0&|*qC9I1gFr5{%Ra2Xmi55atXrdm`3z2H@kwL z@2>0YfRj}Fx(j#SEsjN~4>xNPO=KY*?WS}~-v%TAvK=38Bh6iE3ya9QynJSmw% z1f%@7+Gnzkcj|CXWuqJtB`ekqJj*J_HG5o9X{{IT5&hQ0c@i?}c`*b8d~#m13WH@l z3QbL)QUfJ_jKi|&yU>s8Hzt;YK_&pdE_cDo>zodIr&q^~UYrm`1*(`IM!35zj7p1s zSCG)iW^#EcH`48KuoLA|^YQl_BsKzIV(=b4?H(MJ*0pvV6G*qvZx&Dg&fWgRo45SV z5-~uWi_unSJfvA>@L{Vl-^kH0Q~GJN;B?rXw%N55?rkJPR=bsQzCiv&dqiE~_m zhG4`%&NLs1&m0vO-=}xA>)wkhWbVGE($(9iX{l4Xsrr&-!p>LMi;zGE21YtJX@tujSJ#&=Vc` zSZ;segF1;#X&ir$7xtL|adQ6G$j$>2n-Vw^GYh)jtu0YiTxQS!tD$>{trXr)@mt9X 
z=msg%&d6hb6)GCsKke7en~*rNX#rz4mL3j;AVCf07fvI_Rq-MuC2$JWakgi5;C*NR6urFlslyIUNy80S z^iLMO_I@cCj%+Z6C?D;V6K9;Ox%j7r3OT6+K39q;z5(sf`zig0e~EwxVzw%7lzK;B z3Gmx?_^h>V^L2Y~K@tDBYb+!ZJb`@g>}Rvhtr7-ijVt2oq=T<1Dh+x%!COJ$i5u{( zqCdxPGxHJWaLT{(eFK^ljhT%FMY$W~D7WVtWKr z=m_wpVa4-UIp2RNiZBXxT`Yn@K-Z(qdBi4UhYm3T9*1JK(_2r?U&|w*=pc`Af>aMn zF$LArQpomBbO@AaSwuk}m(u03xHkj9A)eDc8s+oR#%%o+u{f96aZ0Gf+^m=jPM6xT zn+*fgxma_`XR6&B`1-ApJ)>5qAN*2VfXv@LoIKdXC7$8j z!CYlqc81@`<&d8&KbwD6B}=-8+E8=j;!{wQfWdl?J4Z(~d)=Ri*&@W_ zGAlEHa7>5aKs4@bn`dcsUP;z0kRJYDCn?{uY|u{3Hu}A-pKY{jzK{b^iAHwG+QRea zvmZT=hxtU&X|nOW=IyA4hKBm&f>bYh+ysK}ydLKi4hZ#Bo0*ogjHP@ zX7L50qHCYf?)iNK1$+kS73|?y|Kag&CEuM)_M=cdP1^#DSw4v{TC5ni+iB6gQXodt za2DG1V9}fLJWWQDij%Ew`4PjEG-O9XNszH80(QEgcE@&5lyyBr zvi&M;rBd~#+yFO2cBNuf8xwke#ZzP0CUCMI-$SSQ^anU1B`m$9(5(l+lRxkD#sr5h zYdOsJMVHzC#oRmeAy@XBs{%RV(f`$$x=hJ7%1_5Ab{2>rQdAq@=V?E8FJCFy60SU& zB}#m>7T9aq=z69YA$P4WFs~e<89(kZ4{ylec-AKMjcK0j>Z5g!d9VJh6?w>Uszml54nwEd z_-j&yrW_#Ddm?`ca<&S_p?-CUF8pUlCOCKXZzWY{pkBCE(%#9TF8SD@6FcZ=xq?V4 zAXb+A-^Y;qDTN8SJLekzxEQ>$1jawtm^1Sfhto*R7(_HP1E?z{*n^abfmOc5Xan*e z_pEcYbftDzXf`^jRl7_wR)b~fJ_jVBw)6`OGF)Z&sVj{XF7i z;pkUieCJ-IM6|D74oTOo51h_+z1x1g#Th)l$9xO+Or~i=D(4?b{@hAt{4;yA^#wb$fo=p6xKvOd-etc6`mgXj zzh2l&TEM?H)q8+7+Y!5V6?vL$U{0)IJUL_HEMgyUV6!i{t@+Vhw9a=>Z?zLoLn?Mpr|3CBNd0D zJ?na#%eT*{oz@5ORz~}Ey!T&%D+@BT=cNqG5d3dwId@%jF?n46rKr9K{srHq1hTFG z;8|T^NlQf;YO78YPa45g4R}MYS!K={U3GDQw4gj;^~!?djHikfO96 z3SqXHs+$s!88zpIVBJ5U08vgDf@b)Wzz2ZQFW($tXfZ-T$(z0Q z1ITQg63UG58{ApbR(zS8M*oH5Q2lt8Npb%4ZjG8tmX9JL>+$b5U*ooTai9JV#sfzB zq{Jj6mmw%$^sAk7sg)XUhyfP|1`KbQ#-`c9 z?k%u4j@`re5Q|;__8+#kC`4kz#>^*3hlV<2%mJK{^z9KR6LaMVNp;Y#hfXZ^VO4E6 zvy7A~Z86yeWVo^Wx>n8Ba3I363S7A*#gz~?jSSDa7d6p`m49Nyj;H*Pb&dIj6kU+p z%d8A}OQ_-_RziGgw?Vh{{;*R;Ldv=#0X=a-IV*MeA!@std$VhB1{k9qqRVF@(4ATTD{(p!dUcR z>Eo++#eZ9aj!-r8*j2eWhzxqzeUX}KxBw37q6djJEHR?9S5z7eDRDZ1(gv_+QPFT|W zpRDa&uhD1(EV3VH(&F95VL{bCTBF2k1Xn;IfVT2g{+t?l;=m?acbFtz?`(7?wh+&% z32LZA6@+^KBr^-`6RrBbXAj3PS2V0bZ^zT=xuTvv;!UM%RGdIJ(@CXFynK^_&Z$$6 
zDUjv)@jK8U!vSsPomP(+_)w8bzZ@_tq6blFS8!S!dG61LIo|E;BrxXVf?8_552o8J zoPzHFsV1khQ;x|{n4XI6rSoU{5x(%M#Bk3aVfDdU55#0nD+eECXt%AV~v1 zx9-jTDTXT;Udgvw&PE4Q)Md5aj0K;ZUqc|jvrIWco z5FCXd##I#*B7zJ&gw=T#LI<96IREEgswEGJ?s>X%8YS&?U6J87Yt^25c);msZ7p#4 zTcY$Cg+eti|NOZ!N_96QgJM*z8=&Al_G2-gx#}zZ@6@FFAEXyPG1*;PH8kwoV!c;o zo*@&Ozs*H=d9+g@M|ZVtE7f(wAd|$?+va4s9@C&b`QPGbg|~+sWXovg>B)7aBy8Ii zz?yS*Y<}Y!Z2j#ghtu^?-S3+6z&sZy^UZoN?q}t+;rtWm_xZ`uAI$qx*q;)iIM(170w^dHM233 zYWWg6cf2>nvhSIAc70qsk1;m~*bjJ2pS+%|(*&#@vH`sBBdMh5*w6rThjoUNG?XBl zYyzDRdI&JUya$;F%DF$3`^T}}HTT<(!zunPPi(T&{Xr*wr0=!Q-=?TyEUE+KBPIgEh?Q|G65Q!<`N#G{fCUru z)5q++9D1sGvJF6UJ_s=Owk*g~9Uo+Q*lo-20T643p09W7h_Gv5jh7%FbWz=Yoos%i4E%>-Tw-4IdRhgKBo-ZIegR(tST;V_0~VT9&yaeX<{QMl z@mT^#xOyY)iY71?`nHdI;TO%W<@QKO8GA{DxC}Hb!zy#aXg*$tl6FKqmcyC#HJPj) z-q6p*4WIG!jiAA!ypI{?o&;Pw*dAE%q(lqK`uvRiUkB4j&9nLciNQp_jymdLnC|3o zdD3&eEGTy<2x4DTfUa>ULlR0?LandsTq`vpRt-;peCWf=&FBF zJ7y8>vM-~sTjER55Q2_Z<&TSD*b`l*ML+bZFZeY0@)tSG5-rl|<7 zUXEj`QS9Ct@t$+Rx(G8t@iPT^1yZJ4#(zM7gs44fIu}?yZpa~j=8m$l`g`Gf<~mk0 z12=Uwu#C+%gq1tZv)(KUrjs=y#>RuwX6^rPjU5r!f7<^lM<(mT7N?9uYJ0I2H8=-1 zBEgtIDG?h=`u$}7o$p!R$6%gIXDdVyjP*gNZ^B`*JqTHpmquT|Ugt7P3Wji~V6<|j z&>pyR9-;x{D5x+57~wXFDyjF{vS%=7Xov(r^#-Xz{gbsee%+>s1)dQk(*08Y(1PWZ zm-q18cui(yVCL}S*|TU6lZfF>*y>TuVYKj(Cea(W7*zbD=~fXTA>PBIWsQxaJR4TZ zc*Ns&&2z`H`P34~hBedm=sLg6^Xj~+v;shs1kT8Kb83Xxb-8|zF6d6qhAtR3Zq3J2 zo!$NWsOEQf(ttS3f{qY0iJpuaHf(iq8VNa_UJ_?^45O=J&wQ8eJ3TZt{QNc0AQyjZ9MGzB@Wf|>kLb$BzE}n zQm=C6H}|WV3o=g!S6`YbqL{kO)cG%~Q=49S--6O-jNg^16b#ux$7%9)&#SWP)*AA! 
zAE=O=80zO{?4}%x9@V;dWVBGT(9=9{1oP%ZhvtWLuXtvNPKbE-&N&y5d;g+;w?%Zc zk4v=Z8zGLvX!fsnu=)TZs>qwI#^RY1aY&ai#@GttuT&`dME2!KcI6cQP-CHYo??bI zy@4sQNH!!hnA&S$+?NhCedVPz!avkv6UrN4F+}>7+MC>z7joqQUz@MzJK7nSe}Ad+ z265TpyN|3%hu<6W>@wTnfSbl=9%h`c1#7J@0tM|dT>M)?$(0BgfE7yvL_27faAlFg zExvZAkxr*hS@MJA2`q7C=kga9ALQq^Hi=q0V5SRAMWnBv+n_+mK4jht3JK|%_W>3j zNOacw)*1R+d?yum5|!I@_J?kPCZPfOHc7#= zXjc_SPBPZTP*Acfz^Zp2*apQq#Wec&RQxgvT=bGmqNLUW&2eVzI6FD1_b~lpJXBOV z2s7y7cjGzpYFf{Lg%PG3n?^v3w33^8vS~{dq2eo1d!!SU;1-i6q;?;)Xxxtv;g=)R z_bXTw=jUHSJ!L#;X=ihuYdLzzD9**LZjN{IKQbvXgbqG6?8rUg8SYxTsgLEJ-BEs9oT@_lWNn-HPtc z$qBDFNPBx_rux-gYiG=pKvK8`DQNg!5$7VUS_=*5*o+iK>ixmhv1*4k#qWA1nak>E z9dphN=KO}6GDXUu>1N~-{14yT;_$2J<4-=ue|8H%`7$8NthAKsh4)wKfg4bvaItTD zG_QHguaUO8Y=NF+`FbMhNhz)eGK+wERU55CVRo{cHQF~kgz{`&Ip(tX3&xHT29Iaw zL3&fp9Z}Ve>fu-caYq#V=Q70y4Pm70I1)mfbT%B_TWqI#?LSJ6eUT79{l<<9>a3y2EyNq_& zex+2YDQ(lkyeHvb7(nf-=kPE37f;6cta*|XF@@8UHJ`)ACkf3X>}YELn}YBd(Y;pC zP(7EUWZSpy15uA_IbtpG5!2HZfL&cm|FAjOdRXL0yW3oiZUq6y7I} zoJ^lSAhc?gohPWIr?qELCkj)cKTole-mCe76nTha;dGV>e$k3)Jm$4tyR2?>OlZ|O z#X;}#a&;$_2XoAlVwp!SD3c^w&zBEXGeOti zr96i=?^##chEuqzItK+@6%3H|{)w)yD%|2B6pdEX1%^nmW3WR1;UuPsv!{Ly5sCpV zwMEu7_;kvm!Lehv{P9a9=jGnx<_2dKR*2d^SuPZ+myIOP^G7xh8SI%1<{CL6wV}B| zG>sN?E8ho2KsLCyrM`LbdJ&IMOdDZC&tI#Yn1~z7HSp?q*>Ztwp9Q*O4|f@C+4uZz zWvHyUk6W#bI|@bx#>i-7n@FjBKMXWYN&a?S{xDi}#P7`)WOp<;$>LNWIzp4P$dAI- zqN$TRFG0E%FVp`%i4}Md-mBTI#q?l7Yc2I|0!0KZ%m*?#bg&Z=2E^!a#t{VlsBmKk zCUGr$bzgpOR_R~Pfyo&Ho#EKIJj8$SIGE1Fr-QmsI zKZD>N!3La)`a`>Z?Y5Uf=4^LX3ZGhFtiiUpUGh{rkT(Z?ed5UY32?b5sn7yC4jAI&M}oLPCnr z>t@{eW4Wbh{2Ke&e{8;dtN)hbZ{iA!_9z1GM7>C4z^zVKknMuC%AU5;WZ(*Q?*$_6 zl~rAmr~kdKR0GpdBK{og-rYDG4I=~DQb!GTPRD8+!S1Xa27-pZB~K6x!eFcGx6V?X zp&H?@E=j@E;0m-rmyBECeCD~*vwXpQW8$rF@Y0W3@_)i2Mz2~orrD3%w!8yeUQi$) z{jwI2Yp#XP`m_0;-CGp4`RLn#RHJxor(-R*yCvz7lo+pE0Z6N;pmt9-yEkn*>&%Nz0v;uyJjjlSJVGK+`(@7gfdy7 zkHTXr1nYef474ZE#!Ppts?p@@YdkHakA-DlH5ZA+6+JkSZ{u}`JW4PzGuH>-`yle9 zsXt=iM+1;O$H}tr{-~EdC$#d9wa=G5LY?xCaST6U2JrbDw8+9`z(l`cI{IQGYwcpj0v38}}I`Rn6! 
zB-WlFYaWbUiO(Y7CEe7{vDw-qx48&`g!2)p$S#+;)r#@H3r3> zHYRl6nlO1<+1@(duM;c#C>P&e&U7?1aU_LU&^sFKjk%2Uw#kFN?2Q%f%44GyP450`uu zNoN_=R@ZIe;_mLni@RHKS}0oF30B;KyStR)UP^)D?hxGF-QC^cp7*=IlaU`8$=P$S z^~^c*<7DB~F;c28z>F|C$u7t#pXXf*CiTbMtPPR%my`YN0kt&sm!?_B&tI`@EzJ7W zZDY2Qy}6bV@Bn!|-%mEFs|eh)!n>)p^7&dhiOrW-QDp}<65T{d99wx9?$d`!3esMw`!Z>Liy<7}U$@L;( zp-Ui5h!zV!-VehU_;3=QJ3gOmhR6vvAv1IyC$RdM%bZKW8#~;v!jdn>R59I&9#ws9*IVwi?y3E44e^fzpA5U%sPZpX;sp-E3 ztHr%p?o7kfiFm857E@aZg0aEMMk}q7ZeTP``_o9PGCh@oBoam(!Uh-FI$rXk#W6K; zPW8ep1gi!xLDOKB&{2ygBY*ieb3d!ht^VrdLO0H+aNC4JeN2|VO0k_!t9~nufo~Ax z!M63cr+h<2n>Jy{8td;f6Tuim;O6c|0+5eY%~6J4c=qkL@S>$3$BQ+Yn6qDz6v_3< zbCcQmRiqAxzsb5T0J~_|fl_+hzt#)~y*6mGa8WuNEMh~6IAXIq2<6gX7Z8y;!8zxBp37kJs{UU~(`M+G%Q^XG*71;@#j zD4ajY=I9jb;^-`o4Elc;Ku>%|NvWrFIxn1~JPH;@fz7Aapw2eB{)NQzjb9$}%~FAn znXHndfBvgbeS(Etc@%%`cX7v=$yr39ivqzYrL)Y)kMG)u7GuVOSwXvxsLAc7-l4;> z7(d_QSf?lEuw+xHu6DkLOdsJViyj`u2=^#YpcKU1P_B1sfvN4KYQJVX5{aqq;W(N$ z?ZNGfRy2@}Ao~l*E~a}ie{eH`@7>r{5JmHp^kxmQ)@V z*rWfuEHJLm{940y8V4Ez>*ZTQTQnK;`bZc{{Afxxq2OiTTXJG9cb2%28Jz^A0@Br) zv4Po|+DtJEzlS?Y{rO^O)3Mt|zI4mPEy|wqI!eV4uL+MnR@AH(q9_g4?#{9rN0Vm- z(sejqFIoMZaok;2$D)V>#WtZi$xWt%bxls^ zP1W$)Q=xf9<7P6OLyI&kZFYLS!Mq)>bQDXw5k{jwp*g>bV+3GyMhE~Q(aWdWhM$#? 
zezFuXKaWU`#81cB<#Zg5leG!-p}o>+I3g&H^PZIG@7m=zwM z>*WUARFUD~pp=9Vjds8pzYtE@bjVmAPz5!}clmiQ9?Ywm)iv^kwBRE@OjR}Q1}#51)R5;p_ov@ss*`TlJ&^` zk0*}tDll3@FrcOAaQ|G-2TRmLD8+OGzmFEOj7Rtq{9!S^1i|nFE>dshr(a_J8_s#x zKkG>|u9F2t>5P8YhhnfcFG8;Z} z(Dolir(B-Axwnaq>u5UYaiz3AR?Ue6*ECGz_j0~oB>*YLx;0uX$ zQIU(w3(8b(V^s6;^iaHE0+Zvvbqnr+VPk)V6P)O4pzmnFjH7`@ym1@=3cf&^hZeh zf?Hgm-l;u$Y>h?_U^2&hB5;5)4+fg;D-~aj2!d>?=YOBtriw{8{Y^nm*vo?EMe&dE zesgT;E1vCg>g0o9AP_{*>Wu5MuKL_Ze zpG|7~(ABv$-ZG9ItJVip!Vlb5rd?<_L$hVp;Fmy?7Iz`Arz^98W;6Fr9LcsjEB{zC z*gRSge6{>e%1)<8=l;_4=5@!N_Zul^QA|9uqa$F|&JJQ-A>xH5iOE%Zwayqk%E9q@ zG<`hw(Yp8YKqi}Gd!O>PgM{6D;LP^w#mMX;U|ekMIxwa$joobT{OXHu?-Klmadz7yBdW2bjY%vN|4^FQ{Jq1%F?(b+#qksOcRy9OlT}qZJ z+zakK|7|U_Cu-hOeXy$sFS10?|6#oPp;XpZ;Xsoj-%Gu}aJ2+vI{%Mwu8LH~g*8sa z>E*{gG2wJg?2KmTMXb}g=s74{6Ulofk6XsAB$zF<7P3S%qW(f8@v=|(wnI!4VjJv; z`AihD8x_yLs8RGO6WuhyhNw9`FJBGmrz9WM_JryRExT_A|J&7-{6o77lHPM86L!6l z>k`GLft$T0-)|7cHT+ zA>5N~5Scn1W!55fW*-yXfVUXom5|tqA;9o9@a48YjT;CBkc*{_){AOn56!&es^m8% zqY9k=DD2Kdg%~0^M>6k7&c5bP2sdcGvqHSOq_`bE&4lG-!o&N(0{QTGR)02yfF|6M z-)v3r-F_VjtVfvK%th?+^gB@Oe9f$tSHn7~>4Z_b}vpm`fyqdMjQ{FUjKaA;ee;n;{ z4^iA|Snsty$L%~0?p1vsvbMJjKT6r@xMUmL|3}4+-*&loT%=lz`gH5i`BZ>|>Fr+E zY&Gjxf|Evm>Er)`_2VB-deXJ|t(j_UN!gXQN)wdAn?R?$B zt&6=c{I$NMisK|NnvjQ1)jjq%BBPi69A8M*F@XcGh>U@t3tIm_lvII;7!d#00A}&> z&~OdD3aBo4LLBmpV=dXwrMJRLMqk}C@nfz9zBK;Ih~(l#@8-w3vtr%G4RzvW1%ryE;9oc;7cm(fjjmPOcP*(DU;k?2+sQp(&=QMXVX6u z;ZZOKW>7591l1tNZB;??714?hvYo_G%nghv^$g*hoF5ksq`|)&@uRYdl}qhFhqB_F zl+)WJ$P=m54+%*n4~MHL4||*HFME?b!`cd%Zd~t6_Ge=B+l+Lr&qtd)fNt+qI^%%7 zRTpk{L%S#PeOmI+6HK8USHPZ11Odr9fBH6TT%pT{+SXgZ_!#Fyq^V4gn`T5A6EC4F zDI;00IP9X-T{0-Y24Ai|I~tJDo_=p|cz;7P%@Qfu;G^QuUhQ^dI7ALOB>k&Jp=hMw z=BSEHg-xg-sNQ>A+xAIiY069i$URhWAWgKn1F>-QR}DI6MlaAXA|+HgvOh+#XxTt* zduv`^S1v$|gkbCT7{+kO`Zc^BgVHznnVE4Tkg6KpDN*7mR^u%WeAjC7evY3LX)8~M zj}zqx=#EoFJt$NwE%HGg6xa6iFjFT~ObxA{M#b#Kx*37?gvRkZH3x&c_S%H~+qBkB z*mM_CoYu9D-{aXthD+BkphGqc+zjO*_?dJHP%6&`={XTQ<2WK&3okH}giWC9XIs79 
z^WYpaBr(EXS%M_{RI(Z_Wg8`q{~ptPniJ4d`>_sJUVy7XKv-d9xD=-Dw{6AhRFjBF zK@zz!9fn0%i6T^Q2N~K$;jp-KQAp>9r|{#(g?YLUuuq9~a0W|6?R`^jP-LmKp2lve zx1YJ?cy&T8CxhbiyHVN5Zse`5C^snX@TP*c{b=tc6 z2xRLc-SEA4?d69-$4>(958V``kpIRnhJRjm+59FmH&73SJ#Q?*Fno3^gaz^`b8h|D z%5xe(lcsW+4o_oC3+UluQ>)Sl%JqD$%)|c>n$G*JF6108G(Rpjwy)lHnH_xe9d52n zr|v1^+T*ltUb)~ap1Y@~2|!hQD#dPnioL2(&qe@}soF&NarW)SIv;i85}Yn%H$^1TvRO02Q02YDrST_n5eFXOAzyNJb29qbzr zi!679kyA-*8z>f->O!}dtw}f3_fD=h{el`VH{%GWtX&;8s?^jD=}d&@?pYA!;b^zl zS0Rf&(!ip4)_@e9sk7H>+7_3@DYa;J%Lea?(z1^f()8W_i+s#qGCqD0=Wq64?80D` zSQ+FwVy0tO%K=1B!{l?O`Y*_>)~L6rufBGLMVJIB%WhBY%{Jt`B{&$REEn#GQ9 z$|O4fX2TiczA}`x(BcQ9NTC(E-D18u@t0wt!w%cEB0W3?rQhhS2kN0Thv~_EUREjY zAK_#P)ZVwdzHBrK>hQcmC1we$2lziSLod{3YY7>+_!e|05*kwmru5HD>|+W`5hFXR z17FvoX0*EGy0*Z;?GVH9Ku0RHl%ta0rfse_#2MgPiTklX4~h`GaX5FC^E5>VX;5Cx zFlLH2L(6OWqOVhSPs|h&95uPgej_lPaXk!tveTc01zcu;sWuyN0yEZMqexR#-jYca??V!{EN9VZIZW$FS$NnkFw` z(&#L)C+L*7Z;wh@X9(1kLXMX97~9e`=$B{fV5|Kk3dhemO^hfcV+6S9rTE3||sk~n^HK+&*Wj&&S%{e7M(a=;H_6D~Xrxvv#D z5Ok&&P47T*)-3P)8xytBVaAJYn-xHN(kGB1FI6 zY$CYVnvnax+*t%%#x=Ta|Aq!ZD?#fAnuI23%sUY0v61y{0%Wvm?$;kJ%#0q0$@>x|oepgb4FdA{%Jl=anp3A|7v?=vLWG_c)q=~!L6i$WWt_&U+$7YK7Po4X zGyoqeLjuSPf-hR2`ZF42??($0Zp+8v{E;BIH&_gC&+e?jfv^7&LZ?^Wb(#SJ2mEq3 zhz7p;foT1_WN%~i@?l-OGSxY+C2=FIDK>}wy=srKJyh1MVyj+QNRu_YbMfRmV15f| zDxXdSIq=eB&9hl-jm2=e(_9104s!OCih^8OZt7#it^xCf+-HT4HlHs~g&8bqYQ6I8 zGRjuY%Y^3RvwdW)vBUc58F9_E4ts8xvAVw(QgaUaZ&^Zb_n1ocaAmsuW28%?ECPl( z2C{`!f^kTp(^E0ST4a5$pRDeb2AH3}42H`q%6wNSg^QF#{RHF+0wKgNDmL#pk6*lkp)A>4IT)KD-;Tfb&IU`Di(_8xIj55S8bw}ercXv90eOM*2qTW8 z`=OgtyPm^IrMcG}emxG|5N!_`B$LHBq6|)>DSykc1jzSnf{?B~Pr(45{R+4QqOVJ9 z^p)J7)5d9YRE`bGoSSETzh$9C=LmB@LSmS*DQg~?apKqd+Lj6MhRg@NSOm9lV<{Hn z+FE!)+fPZz24XoJNFW)%K+2$}JPQm%cdw>={gye)l_DssxuV!=v*Ce`svQjK$FPg2 z4~WwIoGuQ2X*m1CJc(6JY^usO3~VxIQ6n(mKB#qL6OCWM{FLn~uxwocD%$=a-NT;2rilfEI%FZEv3& z^sBsBM~zmM_&jtm^DgIZ*0p6m8cp(ej@Km|`j*t;<~yrJ8sr~`d1ntL-12VFmW>wi zaxY~hZ#gir!9>)B%D}H#S#5#&HL7>CuIeNKngZSc_ 
z=EPS80Fj)9LzmxpLUNotdGa!nKRnwf?FU@y+CxR*Ytz5WY&uKJdfcD+y*r!dT)CYa z0xxo$r->Oo(DuGvewcrt@IXh*cqDqo)*87IW`8R)ci}@ zZU25!eP;C*=H{|Aa*bSlWBCoHk&ht~on?KReX4DTRKGoI8Ml9fm50MsnFRIRfJ9_< zxKs0pNTxYRV(a&BlYi(eQF+eCTT}K#H&-v1`UMyi&2bq7LH5gA2d8?_uPf{Gm{XGf zafB(V2aD~gGX#=;4L=Xg0X4Lg^qDP9*S>+1a~%8N*Z>|_Z}lmkBNOiF<8OP;b?v;N zan!q^seDs7GQ`+^n(0e5;E|f;$OANyNc7DsCbeL47kzsHhlddvbJjt~>afhlOgS5> zrQo-Q4M8}~%e)Lg{0#D&BK1oU(WL-%h!4`bX^umbgZ3D`?pG`<{K8P1&*_=Ng0;|Y zVNX_aR?ypG$&CLX{I1vhJ!IK&GaGdS(F^}_`I)TfolNdG6k5f`Xig+!KS|!HZM!nC z|1GKg$bOgJPLNMv-&tQa?BM=vLwb`D*k;`a8va?O#p?|F*LMa@vZYpn?Oum)rV0IT ztV`ElGf4+dBf71?0~&*Xn$UM-UB1Q)xMb8uYlh)!V6wYld z;GA+f^77a~&DYsqS9j*ET{)?08Ld3-6*3XZIcr9;EaYIFSDWT z*#^YOsI6P7-b)vKrQ&>cA)yImEIT8qMhrc zDur^;VVHwvUyNYe*KU5U277}~=8rabcATbHmpq7Kt*BVRsr2(yyp~y5#&mfF5+L7x z(^1p5-)R;P3#w8S^fvl=%6umiRKa8h(GF)6+L4u?1wK!jc(3{-xG7W-ueC1Mesbq6d8q!Z%iZ}&l&cJ|+)G>3*bq&nSMu|kCc;JSdL(&Rj~Rsw!ERo8 z54T}0$dIL2q~#{Zr@WGLIS8Gw_ZyTS=f!KCB`MFXGZN#kn-&P(rej^Kt7YQ4FA zxed>U)0M#S`LmvQxRyx|I!UFBi|G+{Vbe2n1?^V+k~!9Z_hgTB>eq|pV#J#an)6&F z#0CB_%U3Pfa+0Xr$8Lwkd?1WZUgUjvTU?zn*?;c@UW#%D;-$E(qp!%5jW2vd+TN|$g z2)_s`#)|on2cTzB>C;L3VEct-*@s_x(LgUB^#RKhyXMkg+TdcG)Jg$L zK`Dcug;*41!8;Ex!o74SwPe0JeH-mFIM~jyhY^%c6u{ic-@3@UKFJX&ipwKTH6_{9 zpUW9<-$8$e+)1j*Y2Afzf2hrHB)0$d`bVSRHgmhLyMDl6gW>vk*bk2nF`s)so%3Ui z?dGU`)cXvV$3a=u9EoH?zTSg_M&yI8be`4FrD)i#d0#Q`hBvBiBlPoDhe+PIY9Kh< zynG7*85^lFTeLB>{~j@tjl{hhbLPJMAJFzZd)*kH5pi-O^?1@~F!|K&UqZm;kh57q zq1QJZmU20Yvu*NMUaQ{+{v-CERnE$zL$n3RLZgaD_C(aa0dwfR5A(hE;Sw!eH*8lz z#9;WBud|g?gK3NZU?sMHakCR}I1}O!%hF+z6MX0^)>t!v>xajl)+fy|KSGMW^;|Y9 zeT+$4=7+mvU&nlQa9G6LO8z?tVfyMzm_{8#1=(-pTc@RIqTg`YLQ^ zkTU+OGVy{1V?f6`?OqS7aVTBK+A{M5J{v!{N#0C1N z;YMaf;QYe#<8k@VchX?nN+4qe1=}{Gc^m!{79*5Z$6Zr7o zGV7fwL~O}~-*CB1&?a?|@@4$fi?NP7|Hb3Je^eBd^A%qgigb|-|4?Qv>|Bj`T6`Cr z_~AO^OA8g(boB1Yi-{YTbPByBkCz8jA{`VZWe4g8)>O8by#&T|0E-{YD|2mazmZV0 zUfTG>Io?}LpI&RLUrUZeh<_ExPrzu0bk|<=QP$3vLT~t3ni%iXi?@U%rQbr@ zxWSvjv|d}s-**+(}n$2;o`FPV$U$Eq!^Z$9{~ 
zhJmwlWoRot?U+N?T<@~yO~7bDc(P+p;Gr(}?h$Ol@Al~RLO>uCR_#-PIz5WH+i@dw zYp;jOz7|51g7fO>dCDf2M*{a|UljnuO_Sp~r% zOrv+A82rV>nK7mz}<;rZ8xKW>t!#|CPBjBNA2 zWK!yBW0roYI-JbberlCQHD3MJcW|hJwYtjqHK)Kd6Z>HEJM`=7kWrD6bo7ZTS&mC_ zB-GhC(r*GZjdK&QK_h#G_;HfqhJSj|VKyD5oS%&F~ZW^1-aJjw+MoqcT5 z1dkCzw6$54EtTfPnsv6dko8+% zE_N{nrvJ`lR7JAcpm7i(b#z9cn;6NU;( z9@)BQk}J8l5jh^w@B+tY7e-Y=2BxfZ_yKG3Ef**2DZe%~^tK$DbQ`{k_%S8}vREBH z!W~<0@Hl-gc;2_@bPmmhVBY0UTI9aG=zJ0lt*tpfLJ4EW@lHG`ne~XBj=g+h3JFj8 zh}HFq8`9__%KrCg$kE3_X~x-AQ%i5W7Y%JiCmGMbcw zs|N5!^}~S^ZStnZMW{bRntbAe1$yjIzz5L;hIh)FJP7qKG_XW46pMNse3J z#*7l6w|Ch8(4@9@1@{DZ@lKePn?jpQ{7AfCfEbXhRbv>)7iD}!a1^dUNnz7KG@%NN z$He*bK|r07JD@5pKXhR)b5~HpBxEC#-kEAR`Ck5>Thi(}^%QXsX{kC>UII2rpyMd`Gq7h6hZ;a3x-la$KN86#|BfGkCa)W(tdVoPNO9S4;l0Za9 z;&FSTGjgn%VM8znC%SW+sM)|liwi#2%?;km9g=h4nzChSQ)EF~m!Mr82F&RJ-P;)- z9V!o54lKXYAF)xjQj>?n5_k9gNKv*U#<4H6FU7&V-oEA8O5JTFO;dMN?Y`dXdy2|# z`h6|5uS5LFBd>#*(uQipc2D^2sz%;wo0(A2fXxCZ%UKG)DmQ-Cb!WVH+h5}`%@Hbr zqY)zP7{ zGZ2fbKs@07URz3$L#a=lAgTnHUDfLO7>!4m!Na$YVuzwf{cJvJb_!k}@Wrpo${w#6 zPH7rD8y>i^zqfu$7EJS7TkvGw$HT`a%y_FkV+g1~`Z9X$O6CWoeEEBujxCP?1QH4T zogf8& zM*BisbG*P9wcup|#jt)R!Z&Fp@-?YvSKEu~)?bFOyl>f8-I6Jiz|x3YxVA*z%jIw< zNF3&AYdWEu!}iLZIrUz^)owXFBazu4zl45o9`6TlHRZkS|@}`jG z#7A!HIaHSubTYy30VhB$1Qr%n01!>M;U<<+aYle2D_PlMI*9H4a@l`zcPBHN&T~08 z{!$t0-{^k1WmIk=7;7<_9&)<8OX14Fkv_e!N#zQ7!S0#RjZT264TUPQphjBBZI1TC zdD!~foIleZxIpr!fBc7Gc;g&zlU%aZo-2FDot+maIi1P{5Ed?{i(}`+C|v(y8;RVv z*M105fSC@l!$q!RrDDe2HK=lxtqmjek}mjHNAdUInAwsH(xHyO-tTQlx;FQc9H<7NY0x12uOq&PGA2?`O`+l`n4d5xbA$_hKLLYuIVP6A8A2d^1F=dc%33 zmivuk9ppBlUn*KFt0jbu2<0$!z-v@`^QG-N=jK8NG5w4erik@oV1+{%O zPQ6EX_@kx{=ovgovyO^kFV69+hd;VJs5gu;;^gNbo4BRasS2zPHpjt8D zFIo&JaNgS85N;sP7rq(mNZN*LgT*H51P8()T<1to82U*0ZLA=~R)#ZRVpBSNnGn=vaT!SaJ`F{&w%D2I0U*Pk7JY+QR|}&h4h-D$hk58v{=AVRPSL2I^)LhkpEnT^$k%H|yvS93kWP zP>yhccHFe8<%BGnZJJY=*iS-^ad@sed-2%*nZJAnzF|Nfqq~1o^C!A~SS)^4*B*RB zj^@I-r(#JqPCSfW4POY436Hv&X32B(hFvX#O?ELQnKV-{B4s74GRd=}@xPU1c) 
zAWb*!WZPaHuu7w;cB}e}Ppf(<4Kf~Uq~^N2yzwPdMO&yQN*gORk7@Qhq}scd8uSI|UUXL&|X5RSJ4;kZ?r>wl6mmr&j3C&R`K&rg> zYA@^zY&R#t9?ZZKj~_C8YOu5+~L7r9pGP1%&sl4Lo06(16FlQ&a&2&zKm49r#< z=@CG)*& zkww%|XrDwMXuG5+5%4V!05%kilau?L=Kl+#wMVtC7HACZD`6D*7_x2Jwk^>xaNUfQ zFh%#XGU>lSxow;e!@OGq$Wo+xtxC0FCxd-SOO{s^ z7o3``n@;u>K3H{ba9}=RL zn4);qnLYcM37YgNpSYx2&e9{?*1Te87VFr;@^-)!kYRs&6%jWN{FdgEY@@Q}!79CAp`1~TPF|>Bqs$Ns5K+K`ySOQ4U zu|_qupC8Vm8av*6pEcPRF;EQ$SYPmmso|g!((#YUnXK(tT92@Y)Vpd~0+mX$9?bJo z|E{WPtRA9?@JxAK{+;qYHPQT?OxMD7m9nSs>Jr>KN2`4IVl1?OCTih6C#Z+~oY z8wvZ#yJblDy$RL_AJ5y{3#nerv<@o{`&z6a$dZcJsb^ohOajdfl)}#KJN}~l0_{i& zb(w54)=MZ4H!4v~7AR)Ehdmp(D({s3iRL8}`s>W2nk3L_0 z`7q`|_MXu{Z#qKaG~W8YJatayeZTztu-breacW;0-_J?E9oUxd5rK5m{!@TBtV$ba znYP48dubmEWNYnznb^5XQ#?E(wkIumbhe5oc90`iqZ0-{UK7qIIRYCvM zn|7)-Q^(p4i}^cTKl}WWCpqDM-K1}+g;wmT#{y{iHC+@rbH5HYDel{V<#m(pEmcVB z)#H(yc((?QfeyRs-D9*h{~`UAP0_>4fZ(9V9LWXKI8oegYiLG&mlV#~TAPOk+2xDM z7xw`bQlynAkrv)g`&VRUeQpB7k<8zld0`kN!RBSNDEwI)6Pd~PIo}Ev64`ja>uGGD5@9F+>^98b@(AC}_hX#2%_pdZm zMGm}pC)vJOmq9&z9VZMDy8+4BK9b1yIxO1R%?%o=Jlt>m{r32(4BLl=r4aRs{s}Am@celZ4RA-H}N+XX4 zj=|{KWM)6bJ;2=q*M5h6or9zA$nCc4I7w1r_`Q<_x6i*-0fvHvK&G(Xg^?gkwub7$ zMu$*er!B}p@!TYJ9D#7YhW9U;Wh95lKjR`zhUkm85BY+c{TBMi8~KV!-!cs9*sBa- z1XwF<`-U$^#7Ehi+`!T2DoAQiUnpQmOn%zX-xSmN_=GF@Fb(k3ae%bcyandaboVeM zHl8B*o3@>U&vFXwq(WO}SF&LH7-1qogp4VVMnsWuZn~9~IU$FM$ZsI&J(I8+vFi!8>5Ua3xg6}Wy~u8tmFz)(HeuNtO^R(VVs!Jg-Y{F$hmI# zyC?A-bJ#7tK9$T-{^-o_X@*C>tDGquNokb`3>?b|a_mG^NJuIfSQNVmp}BMH0N6|9 znYOhAi24uPT50l96IO72!M&aI$=fqkz)@nx3hyn5jn?Lso9;}nB0J-97_GyquRL6b zlm6f$TZ6u;GTw^-UiQj!6;cAEN8s1dEakES6e~c=>SD~;DbZJ@M4oLe+3HPBEaH4y zX`LLHRhxy(N?sGpyE)3jSP%%D2hs`KO_T5bo0Uko7lR1V4sFDA^b7U`DqdH7)fNe zwDVqPTtybU1lh9mi2Y57texFJ_=A9~9&hq$xw={V zvvn#y^KA{Nj&92H7egKzq3Th;kV&$WFdt*l98)dR9I`P;JYghyiMEkr^=o{5K_u&q z+7Z07?%dn{oj-*ksVGmfM;n``rnMvmD31$BhzGKOTv+vp_)?nkqPa~-GyVL4+X{wb z@I`PX>^!GE^rAa7sHx#onrtJOO!CWhb{9Ge9*c^Fvt{nCd%gJOmq`W~TcYgc#+296 zm3|kDVGcx=K(3hsJo}fx{!T)dG)4h#O%X);zHWL 
zYXR}193X%r(~p9IBB?Y#rc=;sC-d1qkqK)R?)92Hz0M*O_2X*SYKK3bXX!b+ov!j* z<%caL33@$_33Bf^!NF8>A)vS*36s>N+wK_~$7;0~rGmky_SjZfxn)6&9^M`tf_u1J z&VlI@d-czlA7hLzuui_X(UT!%HKiYjV$d;Ar32(!s$x_ia6=aEbs?LEBBkz6AZAP# z2u3m)H}Bo_cBNw7`?E!=KQK6QO%if#p_)M5|6`Dso!19zXNtjQr8l^flb{$>WGUz= zz_)!-m)VO-%VN?FTGj`UkAN{3!hV@d8K=@gkr9W*J~*;}_YS;F2I4N}oMgSK^gh$H zrPUHvISF3FqtUa@X@iu4yz|?orTvR8q1nlxACg()k$tl@UmBfxNaD`1mnN`0=qXkh z5T895r1ERqW*FFc9YQA50lC`eWFXGbYkdguHWG+D&+bq3O8Ik^o}G`FGCMn5HsCL~ zO*fVT)hnzN-i_Jef-TnU)qAjpCmiq941R@XH5`z!)8L#^Gm);wuZ{(nnT^iV8uzCS z>~;?q>9g1Lw3?B7A)<`jhvjv-_v+WP28@|Ys}EZ&ZAvwM%TDO)Q$Ak$xO!M0RD%_q z-n88=S%kgaaV*#jKAXrSM^5l-bbleCH6t|2{FPmd4r&e~`k8*VwIARpuB`A`;ZtV3 z;>Ae2Dge{K^`W|Dg-Rvtv75hbHS)uTgo?sM+j69E94lp`_vUSPE;OFg?AZuLez8V3 zJ{kJF%DUo-+k%nR6OmYmi!Uo1LVd45OumwG`-|r}rnDf`RRgPQX*??7l;(};N84j2 zI7e4tE`H8w?GpuXrbaM3WU#@6eB(c9xe{1Waa?~>2}bk!mbJK~tbJ@+;&UKiAc^_K zXk>i(#w>>m>`jhnVAd)Nn*`6xh`4*r2Gj*8!emhlrt7z)#+26b_ejBS&Rl+meK?2w zjVY-zS%e0h`pA88WFc`fs1oaIs}oI$4b5O(Zu>w_@Nw?f-LLNNud1J3#)PiUmn}Af z3&To$`M|2Ym8(-$DeZ-nYVauqRyBln^ zb5n=^H~(ZI2rj0S4Am-oza6?? 
z_p3nP3q0SHxWeexsk8c_xO2JPZ#`iBHo(Zoci(Yp2@F^Mb53pj@6-MhF_IyW1c8j* za~3>j47UTLtQWixu?0}0c?(6TC9D1t6g%0j*uR&D?i|cUH?+`O%fRw6v(4l0@T;ps zzgsN6xLi}Q`(ltS5y+(6^-iwmbHYD=@4jHIW3P5oySXoqI6Gzf-J%9}<4_zNTec3I z{Xi;=0c?2;N_Wkpm*>gU1)4h_Q*^eS8GExIio)XnOpxGwuzd^m!CX78cUr&J64+js z*hi0Xz}K;HCgnH-Kg2q}Ve;=C$%U0F;i_&&l<4VtFJg{U>Ou+c-#h!@Dv^JnR84JE zN46j=1H4OZAa;1QjIK}awKgE;@^K#KLQc0+P`;6$B+!pSKnM=_R^;?*5+hWibUI zI+3cO2>Z}H=7!4-lyu5TM_OmZ(XFaG!7nq%&*{!WWjfjoc&l%AkTXTwNv-)t#2Yrd zXI1o?d=S379fdEIw-@bxO>v~JJy?KY@ovr!sNp-RyO+vxB|l;>mor)!QipOPqh~|= zQg*g=N1oo%SRa12)d#pEnalT2=xp^0*fhBv9o2Zi&azn=w#v<4(OPx?k*;~)i;QZ& zOqEn@(}J-(LQ@RA$k+%xC4nuBF)L`%?>MVLCErupTktm%`dNCIdvg>uq6LyQABiP1 zBjHb-JlkMk+7Q2x+-2gyOBMJ6bTsI}IwPO|I(607mCy;)?eYZt3#fCt#znW8P(6Kk z_MIl7Wpo3{j@XFi1xj}%O;HSj8K*vhn?UV&(@qbl;mbVK7)$XFv zI%=8C$7AA<`rru_s%Ih6QHmc9sG&uFyHhudT!aHSR##LB#+oSh+Xmfq@4C@hWDYt*5Ye?hrn5aq;mY9LSPfWt*r)3C!wxXv zemt(qoc3u8o1G$%be>LASg`4G&F~(##)F@Bv~7vJ?x|d?=7sGe#@7Ay5qyapJ3lNG zwNJ#D0S+ER6=1RMf8J`tA&ioUEAQ-tIj*vMrc*GiA(QM_fm=^|frc$0RL)5FaShuLvss(?gakM2f`jg7GgXOgp={g6QxYs__3&cbU zo>ltSy95o8V()b}db!_#4ET(UJ%kMe-tA#PU_xtmOBm(D%F$6Htv$KS!H>9fANNF8 z+c%Lz4k5I18U%J$wOw1k8ZqITVaxVwa2yU57{|LW`F8$8nzLEc9>aC)xYS=bh>F(~5?rxA)NeuJ5kcwhh6U0{cPzTQW6$^d_t;|}!hjPPFrRtHbp!k$7&muAV%(yz4};u; z0+?24F(Y#k)8GLhPY6G?*_xL{S)rlJCc^0CKBGcTyNB}60*KnmH>wL|5f&`REpj(| zNg&+0=vQtrdR*W}-~^G7q^Aa#dlCw19{ASr?-c_M10XzE6WhZDVjyelaGQ<1DEav1 zJ&1E|93knPj};x=-vY6szY5t{5qShX1=5D6!A|5|5t5^OiW@3w%1vA0%V}QE3i&i%MfA@2#d%2Q{#*mE*ox8QFD)fLM_nqdGHvNX<$ShAov+mgBDF=dV-P^WH8-GyK{gMr?ANZYWVqTR1(%O7_Om zAGmZnu}~eE+s0D^RUiF+tzBF7nRS+3r&|y@_FZ^l$_u>L7WW?iuod=g;zanPGKo&} zXr9I55&aok3fZbqEHkVHl%q30nUr#=`@Zx4+?J02D%3E06M6I5JxB9ZgWG>orb<3j z)!R)Y0zK;IGQ+NCoKd9FojXqbbPJ!W-LdnF3#mac1vk(!DmQGwRY;_q%^os&iH@iT z0rJ9Uj{g}f|Fwi9zW~YD%QGuJJM-+puKG!*5psxD+3uZ8No{oWY&TF@k}P^Y2M&O0 z2Lwcq-;t_LaamlG&c#!U4a6dCL^6U^-}d(n;d7P<D&Z&c}BO%vmeUN%$cz^B}U-nT4TFK*CVPFSv0_!Adm9>K2T!xhlP!Dy-dU!-7&s}ZfPq{N`Dq=urSQF>&3d*P`V}(UozNIT0|9t`MPy!cw_?Nj;m)zflwYfvjaJe 
z0|jM0p%ml1>Ehwy`Jp8C?2>nXv{So!g%Q$>+ew)IIEB?R4!ekljKv+e6nF(|&~Gb{ z2VirK2k}^$4fBQ&psYzaZ|~p^BtJDX`xgGfVHkBx4vo1JwKW%Xrm&I2- zjr$|$@D-Qf&&F6G{e@zI_xV58h=Hn&;xrS2j;J)`coRmhAhfEk8v#@z=Gb8^7V{5O zF2~nThGu5w_Y6jfVv#NgGBShF8I$9vfMd+P0t-n7hO7xj&nKq-G0^&?l*dt-BY!FK zqUCniq1>1Sx{FOqiPxMbzkOhh7J)T2So4GF=gFStoC8}1HU{*$&KWbuPmt<9bv)HQ zB{0hOVw0$8>o-5;ze44Sss0S)^zhb9&b!pVW)K`&j8Q((9dJn0Xj9aB=x6Q8?wz)y}+wY9hV|konum1c%S`Y+y?v zN9i3r)&jE(VTE38>3Qs3#oOb*W<|UQbnJ&giEzgVVg& zA9whBPUH|oRK|~iVf6*qBAp}n#GplR^g3tsj1E*r*m?7#%7pX=XVxgrm9Ng3 zB|bpi=2R~Fpl!-#PC!|=9&<#86Ef#nS8daWe{3R2`7{si>Ew?X$cvQ+mP^%zn%&qi zxv#H*esQNoX4E|v+Py|_5?-L1MAzz7Qne%bIMzYL`b=reo8QT7fNA^xx;SJYh$DYRlz7~*$d2WYq(0c8#Gr&_Ko{QW?mp1>7f%ZrRH`VU>ewoq`pX{z z)<)S#dcki!g`AO3@|81Xb-EB!A^SR)UL`!jGJhed$7Bm|UwhB53KY4VROP6W@eF)?4GxTC?yfGyTPgY8M{%V`K`p6g*E>=g}In&P1Z>?6^ zd~pOTvj+z1NQV9@e`pQcvNar1=~vmc>GVc15jO|xW`ysub{1S&Qd+{AJClTEcHbKa zzmCn<3eb`Pe3@dWdxwx3VVgc)1ly(4)Zq||QzieF^{U(z$e6)=Qff+E#5G%gu@!9; z=GJ>@Vl+$#3dzk>W8>i zCRaoXvb0P>Upz(ng%U7?FndBRdUR`{@bc`EKl2wHG9_0^Bv?QHzWM>G6QEMCmlvU2 zW0a-;&;V0bt^`9;>JSXxt>g2Q_f1NDr#(TGw2QMlCkDC|^8IFUCA_eQj@@WwT9ZsN z*llxVNYd=Iq<=Lgy4#WdfDL=J_Y^Ui1~!zS(l$Z(K(~A$5}C~srik`^m>)e);wbKS zsd@)XEFpW?yh*T%)R5$#ru87D)d6&@)l##-dY5_99MOk-ll9zmu2ap3Xv+DPQsS-0 zWCB71pobpd^;+i;=j5A+9-ea+wfi`%{q)UdiSX&W0HXYru}W5$6TyrGot~)D?PKXv z+B$)-z`SnY_9w_5F>Pxe#!mr*j?{lgqc<#VIgR{hY=I>QHdZ~*c4)u>h+)}fd#%&Y z^is@W%@&BGI-CH*2jg3DmzW|hAs0DlsEHK+D-198p21!rmlPM%aB*Req;cxxA08dW z0kZ`~9tZz2pPuE;gkx1#N%`$f$FqgC6W>wK>FleM0XM$UrY{C|!FyYTYx z)Kf+E{!ajS%l`if0CKOp((9cM`Dx=Dv~nQ>aiIoHa((!pE2v}wD`pvLvZZ`iZB$f?oVY!_SfXJ5Sx2}~ zpr2__(6!F?g@E}dS1V`(^i#u>`g&dM)rgJRm5MA3)Q>a@-EB>pkeB9t$#V1w6a-7| zxZ3^M2ejaTVI!jGY2}D-d0GU1(+$rriNq+sIY zOlY^x&HMk^m73aO;7SYpK?nXW(%;1u1Q68QrJKSwReZ!;kJz3}_%ovxmA@8`zsQiS zpK#udAk<<*3U(%orpr>;T-Z$pYz3Gp7~bUOKpy97(7+vi+%6oHxzClx1FaSa5b)-< z#Cp^611CUX<8L11cnfdVFX)(t;O>(;H`8e%AgPbUs>24U)&n#FPplQjv#^Qtzr@@> z>9i*(LZ)HR_>{e@SG*2DtpA{1)N#Ubnzyu!3Ee)D#9o=0=3&t6_rZ2eGN&Lso~@>` 
ztn|vQ@}EXLRVPEL3y~oAG2t*vPhzDfpT>3cT`tww{6<|4i&is18r8;&*0X5cT{cVu zPJ`Aslzjne9$WU_@O_>&rlk<%se=>;?%+k``m;5%AIm-k<_?@+k^z(V#>)faDZeF+ zlT^sRK%UiR#&IY1PsNH_Wd`0bpHrfu`{?sPgDn42b1ZS8{vmRFaV0oKl(gBepdgEp zw6&ECxCLtBL^J8y5L8pbqppxPe{Yk60P4w6QR^oE$HD5Zl}$6`|XV}Y!~ zf}P|dkIsd_dt72xK86+g;~yfwb!^!t@-g%>Kxq#VC$A~R9YA4$(4bmqLdx-mnU+le zc*GtLDx7|sNH(x5199$iSW|@7jOsz+M4YngA_l&;m$)9Jfw!kU+HuW?LHXw?lA}oy zm%FOb++D#(>`vGZcK&!Gem{;!Kr;FM_Bqm8(oiATeGAqZIdV7Kny4OYaT$3)b%nLS zkm^`*zaT~8={h?cvKsQqpn?fP->a(jsW4Pxyheq7NxkU%nO9xp@ml%gVn^7T$x#?n zvL3$xi7=Lsn$MM{7nW#m$4=nXE`yOCiX%~W1ymtXJ5azo#(ePtQle%}*}R*;g3tUI zp%g&Uqk}b@!wbM892`ki$7try&)AzI734M=Q8+L)rnxdm$d>`URc{JHm+L+orAEn= zl5|Ol0|}6*L}ndjT`F4-gdbGx-S-7Z2UETtLAry-Y!=!MHIEHmNN%U~+JpFDUm(!^ zTRWi|C@6bP9LWsWnA{2}m+O|6{p;5I$UKzG4L{!|0?i^NxR^F@*?4Qe#(lk9D!Z4t zQS@o{q6rd+zQ_xH#PN19?0+ct*|zRyQl{S?`&4H%piamI=Gy$NNoNK)Wcg_6m>awq zH9%We-Z*yye^v-Ku9L|7GrqAjJ@u#i@1Tg&DRxjZN$vz>4J@oaECpw>UfdO0YUOnX*n9(h@6&_t-;X#3|BdTJzU zw!iTTz+k`cvKiOfa?Dj3fsK}5u!*bjdKlr7sjtQr6Z^&-T0|N{%P*B#*nSl)gq0!e z+Jsc!v`ZEci#$bDMz1np^}3Iog+|#C+O5kYNL67t3#S5WRwLp4g#B8jqd`K9M0`KM;8Xjk#wr&88!)k(m-N;cQ@A-8s;F{v@Xa07}rtd-G<|#@LzvN?JRJ&kz zx;n;j_J9XIS&j}4p=Q8@NJvf5Gh^D@W7S)4j;J>_5mEGNo43X3N(-?>fRLEDxO#~y z1wO0xapalA@c7M$7y6E)s3#Vc#Pes+!DZXSs`oncg1FB)H6gqH&#R+<#c1+i^4}nD zz%u|Og9ZRRPXQ2TfhtP$@MlRFB+PeJUAb8PCVqb-Qcl;YblV7#QDAs@L$CG{#&Z$2 zprJC+A--23UMC^*#v@&8m<}h%3MWi&W|7_Z-IY7Ulf@z;A_Q_o^X8dN%_&Es`=>_a z;sJ{Jbp|*dZXB^vIe)gG&hQX^u38(Qj-gE@F3wV@xvX*SXpDb47F!^*%(MTmlRQ9< zV)=U{=%vur>7GNR$rsCLiBL}RES7k&EV=bZU`A2%L)*FYGhu6e08vLyoW;jzRAOqU zl`MR@Wf7O2y1#AbP7joiZQlnJBnY?)W%Q<=h}Vlq0j(e+BRtDY@r{A_!~m~IM60)y z1CjXi{=n4@myHh^8|6W4iwEIdRrsPz7N96#a5Z>)dJ=Iy*NYQ^-5`eB2F_;Ar}Rb*MyvlFG-5JwjjN!^di@KsYeV|Y z4J`Up;|bfrL!Ody+R603in_`-67gWhz`6;Xyc-{CsTYz~m3%Mfh(E>Ls%KUs5{nkI z`5PtfcQcosh2C;{(oKv|2O^G?0}lO>Dh_F$NFyol?q}&baA)b{u#r|x1Ja+yE9oEF zm8E;z38}`Nw%f-EB2FvnWeuV%4X&EHhK(6`e;U0sO&!}}oooPSV&3ysIxwo}_FRfi zFzVOs2O#4ZIiO-SqBf)Qvobe@SE5vt$|cRG>jM*j&j6lnK)jInuxXt=e)z{ok!EzG 
zE}u8PDwTf0hm~>q(T0zco{N9-wWX?JT!khc$Qt0j(n>EhBky9b71ET-gbUDa*O#Ng zaKV(Xf&)J;+l1Drm)9pG?|tRi2>Y-U(IGIt1Nip`G8B-@07khp!V_updKd@^km}p|>YZ4H0P!bLR_(l_m@~^v!JD}5NcU-=xF_zcUC~Lc!%rSGw=3+?C zrj*TKWObjV#F+ib&`hVvCP~$=`mq;Ior;|S-q98DNK6+SIOJgk@R<2Ck)@7V$jsc4e zyUu%Xa%z1n7pY$uo??UkHc0PIpG-^2_~zZuEE1Ek?pr)6xBI@9qKg~7j{OKb&|Zs@ z>sOYZjKN5%vlRlQR(^>w5#_6v0|tOG`Q+bg*K51WJV!Nv-AZLZKe0=Cf2NoCBkP&V z?m_!HuMIE9`uH#0z^b+LV6`wAx6>vMn}Gn?vi9eJPEWk*;8orGDvE>-z&Ti(&Fc-m zvB|6xa-9aWGYN{4yK%YmS5BPTGo=UwHVrlM78^=)L1ih(o{Vcl9%F8OX}Dx+5Y}7U zq{ICPD<^cqg|PRG^*bb7RkgVxm@eq~rD-m>McEgTc)iP0Pv#>Y;k0mo0I#Z)n<`>K z4NmCAqH?Za{`@u}_+6jov{`lvW!azyDc)%&NL_8PY6fQsm)JxoN8z1f;2tOeY60v3nB^r+amq<;|*cnBe%Kp|Sj$tZUZ_Z2Sh3ZIPH-tg-{ zc7z!9=*fViv_(__!v}EkRe79F2vBjzayGr#S%F_^&5n$2;@CO}cNl^b6fms&peNxv zq9ei8_`y}GM%D5X%K&T+%P9Q|$%DXzC;$GH{y%0%2V`9Hxy|Kt)7#@(-&OkuQd}O8 z^oc{gq+IP!@t;56$CSE2Sn2YG@vh7pOc=3oqY;#mD1v`L&nOW=Oq(d)IIS@B#1FoT z5dLQ6xGVoEzx(Ig%lZN^9r)ErZB&G}WL;4d5MY?EwMs7LoA`;in;Necj9Ec!|8506 zC9#Oh`tAvHhn5VR9WRLnORQ)FA}hC)J&Ev$HJSBG8)wguIrX zFs7p-jhoy#{dN$mQS8~Tb`RY`3=%Ls|50NxHE{@$$k8Cr>gnUWz$GZDIGoc- zP;Q!06?NK+TM+u9Q%)VE_J?3&c2j6gn5npcn!#Sr5!%PSLY~1#=V8%_^Nr)IGp>WY zyZ1aarB>z9g828Dh6D16{GTuA#cKasDSO~3ECXQwiD#Lhh%w2aHzXu3ukUZ^y{@A~ zoW`;rKgL8^sQF)}iK`K^KA(tZ)uyFv6dq#5|gcCB65@8&S_w`Pfb?9v4xlUgtH)=j3I?8Rbqsd@)ie1uv4+x6?wn`D zzt{mF)5YR2Sm^JULvu(-R62LHFvFc)%*Cn$fHGtu9yy=xC^LHuSvq7g<{8F7ldp%= zcQ(v=j;;)7KFvbH)K6->n@xrkpKARCjnzlCbNF~hJpoh{qdX)Hb ze0n^@albs!ByawrLy7aIp?7E00IG$;0cAhx79#Sk$IYXPz$I{Q>*U!B0+fS|0kJ8f zgEV}m476BdGAlKaA2IbJ%hOMU@Vbch!k^F2Q;hFxJW!tVbdckfHplyWhJ+g^?WQ9B zdGL-16@EuvjB?ga$0x5w9`p+S8kn_b2EDY?6*{3)CZ9tHf4q?`Nd|3l`$q4QQ#82J z-6A=D(3lbUX}<=g7>Zvn;u2!q3E8Kf(aH{bx5tCIWoVthk3Ieqo;Y8b{&XrEg02-P zaa7$Xnst~6I(Ty9KEmd(QZ(|*M~FvZ7o_Gkgv3(|-KJQG!t?KxgrqxfrBB}cX;u@) zTnyRY@Fe{v!r~)dNCGh^xIiSAtVP9>po)8>q)_WDCX=u|0bdqkEjJe^D%tS4Ur0G{ zn(%quN_8~mmM$*g{V-6>BILNQ=3gDiEd|C1Pgc}Y#?4zQy+pBQKo|0GcbK-vt0*e6 zD&hf?qpL~)(cMiG>vM9nvqO_zw0nbn(a@9C`J=&EdcXkfn2F&#Vj{mQi~!vSxa-5s 
zP%S0KjqObW_bT_d%K+lPS2Rc1RRA|)4mkHxigsFJ3uQ#9h#e$M6~`4Kouo>|IL!)~ zfMgcCQ?E8zNv#_bD-#13;qMVa+q&|S`aR);5ySzp35l(3eNi2588yZWB%G&Xt79e04coo{jXn zrUSOGJwajk+>o~mL2n|5{Pt9F44S@;;Xm3&D+#mMK%QtHukMCIdR;Qoz?ubMM0v!bM5n2pU_N^COLWNwz zVh)oO1>tqA@^SOZX!=Gdzi+h=7O_EfdZn4Lf_Dw9#bU{psy^7?MlIAe?Sg zvwapTMaQu(-&b@~6x8&i#0N`6W&NvEouK(;no^C&qM@TPKStZUa(`z7RJqc8N9f!(5%ea-{QPht zNPy3af0L((-;=_?@8%mKSWB4DQ(PK;znM7ki!!-uLs{K4;0YDjxM6uNUKSM1sZOUB zGL!>ekT3LhuT_ea<|BLhn%EWHIJvpYsr5!+rb7z*zqJpP0C<%@D&*^mb1#eU2ANyY9qRg5jHU%3u}IT4++i1e62Pu9G6vI^{3O- z@*F+Jl)8IsO2@jY63wfrA~GS+Yj)VC5QY@{@^|$2Kdlbj%>4+z@q+N-dC5IJ-2b={ zF>;f8qMGwD*0>QZ)0CdL&Hz)@-MU8rNJS)3GdX`!2;)<2LTm#6_*N1Zd~{@7uj{rY zGNF@bRM6FJ8R%Zl^=1V_W(^fm$sg0EmxNA?(po`R|s* zRC%YOgA+GC6kGgr;fpJc@!EMFA>IidS~#CF}e>~0>?=L$aqR815K z7uI8Ok4Y-gCKp-LZcO+*R9R`qn06an*BxoZ)Abfx>QW|7zIyguipHls?YlAUP;RNp zc$>It-GV%EAuC0zMp*O=d z!^^|5rG1}gwhC@E#VS@IB*nAY!?Ju!)upD`y%PcW0Ypj%;8({u2%%u#lRIRYbDd6Q zQ>f|P68jji_H9w{-9wnM4V*RjWg08k11K~+Br-(|042)(xr!`llU)+)?-4{DqQ7;3 zf*AqJmk&?3C)_^groeHJ`VbVBF59#(6uj)tmIH9d{NHEyh#tuB?d-bq zrII8+d5r_K~0sWZzLpl zEUvFkj~loZ;?|t0OiiO%?#oTk(9hQ1i5rdp+0swfUL2u8mGqPAJL2*Ch9pIPa|)a# zSewRgMjo2pZYbzi9MMy&_GaH*2t08<@2gX2KF(;HihYD=y%Y zJpl?=qWAXN>hiT#S z^me?p=@)LGW|A$`LG=5=KZ&$SrXDZj>nmz3NCVbQzR^ZABV6bmAUQw-cFlkm1e4P( zwYXQ?sOE0EyjlP}QC;Gu#<-i|_ZPH`(B^bs@0csK399XDrZ950>gMVQrZEwFd+{^A6UB0X7A&okBi63qck+%3EFak_Tvq@vmG@-qkzf(NbY`7&g(TMRLf9ZFp zKQ)4Mb6VZWS(i&E*<+G!kcs4?|FJu|%kriP81+STqo-AxRaq$w0L2!|N$dUyT*7HB z$Ls<45jXeT?zBM72#5fe>bDj(FI{`!X8=Ai-_|c;M@_)sT1!TXpFLsQyO%-ReQezO zhR*;(Pq{G-tj6iG!C};;13G!%5) zuMYt37R|beU76}upAvSEZ9%Vft6yIths-_=oGZqVl`ITgd{JUmvH*N3zEHjsX(sx+cyR!E*y53=Dpe)Npvdh_K>^m`6J$uWu-&$35zFZG`V5viHsFnRKTyS z`E3PY#SA6iw+ns+HMAIfW$EMCYYwm$o9bHAP|_2S(|oJGbk_NV_nM1lH|**F10`n~ z?blLMM#{_)a~Ja){RIsRo&I#Y70Y2bXx{k_#<^SK`&@xUe!+kPIc5-vptNfW9saw} z5e4455TZua6~siEGg#uH49#NLT(~6>YOvu5yJPRRdwXatZo9ueT|{0I!-Sc5 z6yF1U?F|R1o%#sV$ZmAaKmk!o8y*!=JdPz)GAGdlJf^4;%=t&xck~hMEB2BM^1e|{V1bXq!Pp$5$SLFk!7kVsBEfhEqG8qVIe%bljV1qh 
z_JK>PGykHK-gKuSa=I>7f*rnDz@Jx*U>T3s7?K5b|q9Xf^T?$=l z4le9l9BNz_&~iBHdU|Yx>F)|GEhy0vU>XnalRRT%c<&eR#zMJfmAj?nE(v%X5CkZT zL<+|dKIai@p1O8Bkdp{F`_`L(h8%?w?8*Ab-cz0ICa^wu))g4B9?W=E*rwaVO>be) zt$Rfe%8~VLiEYsWjEaXN+qoHX-tYXO+%PqNeH{(|qFV*N^>|wD?y`{o;v@HD0YW{* zHqxC}}hoh!?Gbh78q=I6a|T?=}SBmNMH(Iv{*Bf+WzQesm_rCbVxjyaH3 z53pBt@XD{cRV8O4UfgVKIQZ<~sFvLY;oMI%XyKr}hJU<0uxklR2-h*{IcvWX1X;Y| za*KI9T<)lqS$iIIApodO!vgx8v&_AD#H@(Kh>XMohQ<%{Jv%96zU#~#Z&jyCWW93^ z|ByV5mUm=K(JU3o5PA-O?g-;)FGUUFhgO6(33@1#6_VRYB+B#ZkXrG(Da}78aG;Xp>7hsJfI!@ot&n} zf3Hl&BW??8r=O3CgTW;r{y~HZGM${Ylk|kia)&F&Wst5^~CXT|TS8R86{+>g% zI5>!e$Dlmf@^X;~e1qrzdoC1Hb9v>pQC21f6v$L;wWX9s#Vyn~~CFgQi?_ zy+7XCsY`x}4vojP4@Hjnx_qMQz{ZglypiF$9Op#s{+T8YT!2J>zUmL)`A3g_azXRx z$pL4QXnt<{LTH$!MCAmOGJnGc=>G^am^R;E_=WG+8Wdt&7_|0-+s~8?{b&u_XgN&= ztAvMmLme?(T+u8^0VV!ep8)zO&_4 zC%zAycZbBYZNdzn$)iGfb$a}loDm=?^NYDtYPZ#rRTG7V@G9i1B|&$rvZb5we@ z!3>L3HkxVN#;zE@7;6J*tEB^ee~kO6zV`85Vee4b|! zk0;{l28Rw!$$pDfM_XV0oQ-Yw55naOA*Z>SWzEk1jC>xz4lgGs-_^A=9iz>cQcWVO zKiV=a!&++F9e3wTSuR^YfFwlOsNS<5jZ)P}aB6IU3nwT1BPahPzkQpuA@s{S^~Cp> z4{6=HviXic%AQCm=Y*7DLkl1h+mS?xXykrk(VnAMH>*L2LD6~$&=)Q`SsCIkFqlao zC$kQ5%l+9UGz<%I!-?o%sQ3IPZ2{}xduo|?6EOfkj~Z*`7`COGAzD2D(O^3FWh8J) zg@R=wWIT>KYn`Stax`w8OvK`#QoIuo4v1)&Tw!{_oYOWm9R1tN4OiDZhQd#D)Vp{8 z-a*+(IdW-=`jDfF^UjNuaJ*v7iipZA1f>sq!9#b?le8FEDoE8Lp>jB*k^zncg`cq} zGs%ZNw*D26v!B66m08CeOoqqd)CcFnLO0;9a6C*y_m$vzB`?47@3v|l^^il3O+vY^ z7^#x|lO<+0%euWPV3v61WAM;zRCJYNfGOadt};udwCZb5jojdaEZ>Z)d~U$diZ8Bk zY~k>bc94ZfL&fs|m38aRjT2spOtS|b4v`*Tm?RDaWpVi(cwL+4ZXNh<2m?d9+ksP{ zUF`rK7xqrwc4Psx_Cy&U}Ss8K~-a18>c$RHMn@7&7Jg4ax_k2EU2Mk@dIDZZmS1(J! 
z_Z(#5r8y?lj_U!k@O|yr(aJvxn0~4H{I|#cCm2>KZ^G5*&+=3ZkB?!TpyHSoee(@b~xDJTXOw8oW~-G zTEdiZSjj*YVGohsDEx5hfVQvqt}-8=P>D>MFr`lS5XA%8hSHe7B~}g|Vq)+RmO+(H zQxXj9PkekLK;7COW}kP=lB8hyZ->!jHS~L6R7QtkQNW^Dct$6apbA| z0JYpdPVswUWpQKRYa;_v0k|vhy`OFNz`A(}fSxE*q`@e=dB1us;zrc4ViokQ z|J8e@1?Go4+41L{^|}{Q%M)PwYQeX4#Rhe6_nR1oEk1~PBq%Yl=)E;XNUmG=QrROa zBW(;O6O}cvR<{)szW@?B!C)c@{oAWQH}<_S4*qvGJ=h--Z9(8uuznNUl+q>!r`T)4#lS`ITJb_1?1Yg0Qd!&@l_l91kF+NXQAPhVc!j{ z&MW;iPB=H;+?L+41;kdxJcP#yyuHe+G%vrzxbGSet3TRz@s+hJd(zyIfKSgt#Vgg8D%oA=Z!gE z9J3lvN8vbM zn!J+ketwUV>TgoWnMuoBli6F-hi#>hQwC3)_^biQ3oXruinS^TG3bZH=i zQ35lXe3k`Fr)W*DZB4sF0x-yt11?dj9W!7?lan#k02UT)WVg@VZRGg{Q?&M&NpzMb z%_1i}r%tjG4Zw8A%RNEsX**fo-@JhF^VOF9JmJ1SUn3TER~auQ&YGX~+093|11^ow zrMKZh^xRn!Rl-TJulSe$X zQ&@aRBq1AvnEr1L>DE_d5I!W^xP&_PK?6_5N#|#1AN;^!IzA4|^z2akuaevG{;3KJ zS6e7D`63@I&8sGMussToc_X!;FV%Q4LhOF8)xvK0kRr+luPx*4_s2bp)RJP8Ni%0V zFwiX2_<6cWux1oIc;)NRw1FF~DTbF*cA^z%&|+(dl6a;A!S89NKMAO@z7#bsGP*I^=1ffvD1gP; zbdQTMp^whA-pOT1hr(@zs{V{KZa|{3gecSh<1!)pMNskZAFMxLv`nAv3#1v?Zh-P7 z!6GhU7l$X+ch<<{zP8Po%~`}-1?Ooh@^cbJABjnjNIg@ccx$W6uTGH%BGV*#x?E|4S#f7(>j29Kb)`PcRL1LWQY3H9@M`Wusq1 z3@lAA{4j_p6Ae+_3AY5c&%u1}>H1xvQE{KB?G}_M8u2 zzF=4cD-a|4(a?g~xX*PP&7z46^#4er(=cI5)*5bdTwxAof0y`Nsk0XpQU6t}RymZL zqRT8a4!hokL-lDwdh%8HkV~6FH-xJ-8pQ&odiF=V>Z2*xQY1Z%CEi+Mr#S_}KAJt2 z>7Zvx%|&x5(gCoVFcNQkaz=aF)2>%#^GCg>KSh=O(k9z$C`X$AR%s4Z7QnQ?OI_+V z4?8yXC470AH(%UMNbeRKxbiBTR!%8da5!}K$f=>|v#uJ9kBqCaA7FDd5GV1Ui%-7W zwOQy&1-6_ge|@^7%n5^v+R*x}i$zeF2ATWjof?EBbzI4Uuv?GNWS!IFzfVMOX;=Z}k4@un9FgO4QT06S(H(D? z`!Z5Ikagb=Qn_9&Ay^j6lPOfS`l$#Vme;#l{*0m(r~hJhp1}u9u$rL+;z_Ax=MGIy z0j)^tEW^IvU!-GlEqv_Z;$;Z_3sc0$wJ`f(0634#%zPoG$B`rnjVpgh@l4#b7}Kk zf?t*R931Z1UIA!-+ z8%mpwFqc2X3g<=NzceLc@V$c17hs2$QmGf0h?v_M7N$M9yKAZ01WxtZ#)ry_e%JM> zm9^dfRS^peFuEdIZ}vp^lp%~7TSTHQ!r$jkTEfbuIy@>bojLPY7}hN6m%ynxtR?Gn zeVBQ#Y427*lUo=$9wER;FB>;+O<<@0Noq=-_C3COh$%u|_D|pa`B&4?^#FV$n-@P_ zWgOy(b&623rmCHao)fY?*HdLw4O}`L(g1V?%m<5kFUfNK7UZkFK=9ubh}r)Uw-a|! 
zy+9!OIKr{XKU!$$6e8Gn+n_y>tOiDW>n=ph4p6<&zj?w#3LGY5A$=UV*|Ul6lN)f}@YfthH}4PKR&0k}l zb7CIo(_OLizmI+OE#mloHa1uzgw~Sef&5kHgvCuv*D;e-c8tmb-A12%nVZMR6xLjp z)`Vi6GDP4YvpAzq-|u#c+S1sW0>*JE`XO?TAs4=y-g||+d~@FAh9lX7kG?fjyAnaJ zT8Y7rYe1lKXT2!k?d9%%^(=P`?$QAWR=z|~hG`pqqJEZP%VWOf+J3Di9zCJpX z%?5X!KV2`czY;PtGcUAxb6QX2R?t)|H#n5MOE0KI18TSt3&56~{BonyP8YXKjq9eg zCk~L?c+-Sw{O=$U8V-Kh;ruA(jWmM!-n6gg%j(;%ckn-8PWS(A5h}KTGYLOl3l60!-`vo-xrB;EUus} z&W-TSoii?%a_7P=kC1pnlPqvWqQ4saGJ<2=4grc9WO&hXL$0LgiGRh}9 zXeoYLy~hPA)qeImj_I>l%S4!iulyG5x?)5xa^<$tyknHXq89h1;`wH!%<`AY z?!u$%k%r%o*B$q6!_ht@s#qdTs6lmE?qfHgHop4rF4BB3;bn5)I; zSWO+&hbU>wV}599WnGD)SxuQBY3oydXBOk3P)nhwx5My+rKiOHaaWmu;I>>Woie?I+ow4(#`KC3pYX3Km|Zsyev`gn-n^f=*xY~j&} z+HNXav|sItq)a^C5H9gFGr5u=N6(*QO;UaRoK{5QX)6kx<21&o4DIToumeTeVpth)lxf=F!GO7W?fwpA)4y8S-K65+(FlD~WrEOatqPI)W z1CX~#OtYCZhS;z8C0nVvQvFk&e)Wl*ULzvpj24mk+0ukzjcX2(c>!z(aMgDkvyMuc zuoVz2#Z;}ehYBnCuOq|GqUTE7$$yKV=bid5W@D1hfCw-vmqe{@Co~S5_GM2rE{%n{ z@d|w!W;AK)kVm2u(M;RuzMvRl#zuROftj8-{aS}p@6vcYA#)Bh)x&9(y~Bfb)rUJC z8KMn>;K2jc1A31Lf7fQre^q#5RZebsir-#o)a#Pd8~$Qo(HS; zkk6>URvW*7zlZDTi`wV|@jU2fy9^kYc3&4-R5yIHTyS zU-%-<(Jw%jMTi~U?r)DD2P3;Y zenm_Cgf%_2DE|`d*n?RSb0h@5GotH)2H>D|iZF_!rj7sFGI#zHW*1d7H#zs!>|@&P z;yK_ZleM1dpkQB-O=0Qw4~dbuq}{Jz*D(%SCH!`NUND?%?`&n z+(4IA=^kvpE`)~ZbsQFw4HvqunQ><^MxtnQ@&AzZmQhjvQNK3bDcu4}gLHQ*A&7K$ zmwcgfJ*eSZJ@ob|l8pS8x9ym2w?y}$djuM226{E!PR{l6xhb%KiOChP~udEb6DwF_ou(TxTXr7?RY)TZFNKH zJrE_{F*JwYUT=pZrI2*E!s4u-|B?gC5Y=%A*R~S0lip^Gb>14f9*+1tE=?%q@+Gwy zz+p2Mmgox#@hl^Bp&+;-W43oqxib3DiP@}}4)SWElnGKnS@>tEEtySdP-)q;wSpxFHGeI8|x+k zT7e@-JSo3B+>h&{uox=Q&6#%WRDQ?2oOCkkW}8VmiARZR<}eO1#&rS1U2BUVZT%-- z4Dp69{fB7LvqYjkC|VrGA?#^$T>+=xDJ8zcZJIL1rxWIL!1M*ihIi?*mgjGYBzI%` zUsV1&v>5e(hIg%3O6RQEv&w>g30!vCiZ{Oh`_FUWV7us=ZP3H_M#y;jCAF51^Y{9s zXtte{{!itX4VjJ}Zl^9+tUUhp%O|e(fh8(lV#b*BC3tvotdQwIp_w`wFD^QQW0I za?RUP^;bhV)d0%8Q&VO&UyNCYur?}=G%0585 zOE|4LVuPVQYHcoM*6!A%S{guiDj$9H*8U+E;7AO0m~8TVByp6N4{P?{FMq#BCH8|C zhf>g_?%{f;16X*RUF}c5+UKMTodSy%TWT}m;1bP}FyL(VIrNpP%5V%=u*ki>JQJCn 
zt~3#Fad9X=2Af%fXBGI_=|)z9s=67W?hUZmUfkEDG8tu)Wm1Wg%V8 zMJc!{FPZQ%Xefrv6cQI;I|L3s+nGzB7DHAVh^=t!z$w8LrZ;B=I^tavX)tVPd44Wl z<#*8-2h*`FAvAGGNG$^X2;UEje3lC&_-NPNKIAevX--ohKnRi{^I!TdPp*^x_8Ljp zorq?gk1AYXT2u6mfVUx3sBCX4qfGzzgEXyF)k-~aoSeKsD)IuBUWDcXP%`PYFGoLc)6oNi6uJqo^ zq9VFcF!=AWps#}fw0z|ei&LD4!F zMpzL~(F^feQj^m2V(oKdTpwlsHx88;o_{(wA1siz(pSuJdJK3JzFiT&Q<#*~$0MC~ zWLtKL!ZGhwLD=%Z0HH^2qk;Hj`3~nm{B#U6xP?2HDd#)9yeCrxJ4-&fts1Dldo%c& z=_1*mL_W{h97oxWF8z@XCjL87`F<#Jh~KVgqR}it>+e)6wq5;qDS#gJcUzXz;p_M! zCibducDx8}wTN9^KK}RX6_0&=u<>n^M2f(sBX2GNr7`X8`}xE69gexPsYAkVJgx3|@04~0%@70c$`?DEIE2PkiTngxOoS}c4O1L~H5ghcmuz@)9@Z&eIaMbg>FUIR^ zmf34F7ut#-yh6Qd-+r4L9*6xKZ=Jbc8~~g*!MG4!Qs+V)_5p8!L#t>%|aum{m z2(B<5)bvv7cTPgscJUx8EDWLLiGHLt@v;fa8_S0b3-aLT0=G?2U1hcO(m3rU{ibN$ z&l_o!B!4ywYNqL2xdg~a#_|S0X`O-wQZD3Fz26*F?efCX#^S_(nW3l3TrVXqs97jI zQo^*7yeBmszG0{N_E_(v8Z_OMUX4@sVX->yQn&x$C)3WuhClB+5)^AsqzVSS36`pOi?^e_Oo`orizu5VN z*}#elei|U$ouA=!jS@|20#pg)hVu0Ootpg%At>`xhcAbw6xU|_*zh-80xZ_2x~>W* z8s;ZU4s!(aPeGhAUxvNg!ser1B>Stzl06s#s*KFbx_ENpHLYH~O-fJ9nnbv)-`p{F}J?A)8HlQrJMFAhv>^upm-+Y!;NQqi2uz(zykc0d}N*#4@_Umh1b|M;q!+Qr=Q}SJD%q-Uw5BB5J^?Hv|s|J59>_1^0ou- z?6lVvSOC8~Qms2O0TM;k=w)-Pvqj0nrx?r_%Z93h`^6>iz|mMH!TO!BWMO~f1$hb2 zZY7HEc3G{NaT-gy&zIiTZRj$it=tmh1I{f{ROMG9seR*O7?lw^mhXTr^NzR6Tvj*ItDm+!G18#Xf ztQ!D2HM2uWP96o&C`2ys0RULOtnzrW|KG*|i0duo{RDe}{H0(JDc?ReN- zZv%_?p&k{%kJ+BW)nYS?c|hu=WC^Sm#NXytc30f!6$w3g)P-mYr`SKq^3 zAn`@5dTJap(Zh=8njX{3hX&A42t9t`ba-_Je{v5ZAQx+NTnP6tLpG=EJ?Y7#u@8NC_D@=to268#R;#=y%#SCFy; z9V@|uya5%Aa7=`4FXTRXtejFIkjxyd)=f%b5{?FpvtSz1U-c(9Tzk6)aCNEnpWIF4 z(-${?m$%%F`MWSip$Raib9@wdT`r_YwK)DRt)SEJ+C@(h^Oi4^Vi(st#2%Cxs0X0cTa4AgQ>=@ZM<@QI4&WrLF()k;hW z2tVkOR}h(v7@M3Ca139=6ZI59bx$ z9JQ}!C2l6YCMyh<17hNx3pjErIwOY{gzSF33!PpqHwqggheYzzrj*TcC&6dS&7T3a zle2gm4tf%+0PISy7iF+2eH)isIdhwxt~_BRV;;$Q063QO<)dmx>U2Nj&htnNi(%k2 zvTBM*>G9%{#_=!0ft~d*h`NW=?k2@-YWPU>53WwTd>h9~zj{R|ycwOmiB~}P-N|rK zUL$a)N9B+w&DoP8J00NWNow{Zd(ee>{K%Cyn&UD4mgtL%&1vM=W#}ElUa5jwm60HL zvA~kyHiO?J5B+zIk?uG#a>%aeAXx=gV5Y3UAOJPeF#6e_m 
zHM-38V;|=#vkg<6=cdy1E{oP= z)rN9`lREr~!FKF6LByKYYaL;fm9ozxj;}7GZ&Rd0@~GH#Abo;l-fwx^wypSX+DC3i zr?b#nnEhCill%+d)#oZl(iAi%b-&O6ED~Aa6uG_CG9XCqGkO53y9Yd&)J9?o4fcE{TgP6L{ubS-d zWj`>1^kU-wt? zJ*I+__2!=<^BfyXp_B$AA7?AK+`kzMWko66a_qMfO51nTw=AG^CAHJ@^5ZR_bQ++B z{jJ}u{s~)O`OSYzL`o4}Wm}=96O@JxzRZsq6&-9YzKB(M*-FERbi^l@NO~0YIdA=G z+`To>%PY$`iJl(wH-s>>E7F$i9#4=<^RS2D7e1}NFzEQHuYO@JY~VY@j=(>)Daw+S z?%kaEY5|=n1Bz5mj-ZDynhyY)j_5<(P=~{L{atV**K?Af_SfZ{Do(752!f=@6UVVw{%A#*P_IU=$j5QYQXa&w$J^2F&o;* zZt8e46V02nho6t@1AJfC1hr_qe+m10@_wjvh*r{hAleC1%1BU*xz|19`;j4v`<>#@ zr{6lx4-z-2rDI>8u>I3C#~VAR3>!?;SgpF`o8P(6X@&S{^ma9gOyNYuL)&l$Fx0lv zlpfXs&*q?cH-oYMk&m04QcV4fqA-=&PO&<8*v$D|1Hh}QRL1U)eKn|H{parZ`LE}HZ_5qc?1TD1x_Sg5- z;*A=oS#V1ug;e#7M@4z3mt9sfWU7S$nBXIXdh|`_Dq#?zJZ6~h3h^+*_wKL{4cHye9n-z3LbLrZ#x(*hJvd~ePBD5dY$RlL;NxP& z$<|Sx-vGXAT1G34E>d50R(vJNepO5Uvud8alL9QBF8~>w(j&MHfrrAk?3zy%ij&UN z;fNpT;u`xN{LEk#m$&IO$O|b`&{|QofRgH@5w{S8e?IDK)byDQ>hx-sV5nFQH(Dr^U?SHZR5eWF?<6dH^OAM*v+XF^TuCpZ-7Ftl(s6E z+>T4agWxJvKK^kj^ytoUKxHYK2Lt3u$8JRdEDLM|0av=ks3x0z(Nct$x&i4z*s4~L&sr?(ofFycfxO8> z4b^oviGykWFZ-JF1UU__V>nA$I$$PewX1bgxUP4?h%WhLSeK;wcXw_kM&a06-6i(x}`r z+62Trx14U26eesQbcp)sV9Qb_G9`;$MXMKcg6}`V5sZa@ns?XJvABaW&HWt^L7sZR z+8!liLmzfX=jvvOf|HjGjPXPCB;4w1)GMs##a6A*>+D8{cmFJ#g}lp3W0g zJ@fu7oXjA>C2SVvM{1(HK}l7sDkG?nm3X=q{~m2gf}-%rn)Y8OkmM_%Sb#GU_TjVU zTJ1n;E6%^5Vg&OA@^9^7giu?EZwXycWiqFRY4}(iN>Fs6qJgx@8$NVhyreBLL zW}W~b4-M;7G3I@E`5U@f001`(m!>}Rx``8f%36wuNbnb=Y(%!NKkdx>-7*odb^st{$YoI4XgkeK&fj8{3%zVjeL9csDQSmF|FwBkjWkL{0FNpLj_Zx!(zoonqi zeY9l98Tv7#2c#9jXu1^asHU!Pip02j{d8NiUhJmIGcwZl z>SKN4dnE)F=Tio_;)X}sMibMyySt=8&CgpFhockIGsz+clQjr4y$Zg`D+&3?RIb-jUIYR_7 zA{<@h;ABiZ(r}Cs+pyqjvj5h)J(euf9YQvC(hdu|&ZZm6@!n*`nTNHBhn_lR&iYFf zY22}Z_2+?R)@9__1JT}6m4Z0!(s8TRf(*f6a1Hb$}#pCKLiWm2sca+ z@c&TD>qa_ln9kyd2A;N1dJ90V7^|YPAsk!M&57tCAUTFuE?NM=vD(*!^YfoHGBjnx zI6X6_F$9S+I+MuOZVw{y*LpZThGazr zcA1|puC>q36;4eje~$BsUX>fB;p0`JqS6QB(F7nhEnZ|Ml5BK0Z8v&9w|cCjJD2&W zo(=gtV$-soN-U#&!yfVYbl|8~nmzxs&l4GX!3Ob*;o9~ph0}X<6Q7%uW7BU(4T-JB 
zxolTlcP9OkI`Qjzj{K^jJ_~+$vLx{4PYmC)7kfe0Wm6T*6L>#fd*e@jW*cnkfjInF zCKQS;x2PcNVps1wFI1>I?$~CH)JJU7?u}!tA7lEgrPy{azu>21-A)V8Hh{jXWudgo zC=0s|8;$>RuOZy8l&@*2%&(~7R#P}xmuKE&{*X`Y8x95avSTxfr)@^j{Pmj+yB8Bg zFnT1>yPD$?H;hPU1Mwp7HD|mx8dwr4HolXcX3dfJ;mqF`Y_SK+;(!K*0*`<=pyY8j zsOsQ>Ozie_hsKcF}k+!D~Jc$5>R(tMMiV^e>DV??T5PNwsKnfQjL(V z<7vqAb@$?9Z?49g7M=Fr=qm!wjrdceyBoSid-JmGWwGhMhF*C~c)FFaziLQgj% z^>-GkxV{(CAAETWDb~FIh^?hg;+&2iMBK`qj`>oaj-P-93LCnXzr2o>-gx=^5b-^e zwI1=SM-|VSCcreJ4$SYkf<}4Do6kNHRG!zQEHv5R+*EjxNl~oL;1#R;?9wurFmLtD zy*=NM5AFM?0B(T@ectDobD80C7vnBH`tBjd{eeYJ+tuiab56-Cw6K@GI0kv=N%(j{-5eryu>f}T~ov%X0W@_ zh1l*|Kj8uUEeoX|)gE(| z{1>~WjIiCt6n`Jv8@3Uw(gnGai$mCsZ9v*`0jFz^8IPS!^`Vfa89-6h_R;$GhjImd zM9HmL(4x`7xU7=m`uM>FmZ+|!f@qANq+N^Oy(zSa84{I9w7maA=OZyxjk|Xb;$AV~ zqJp{;G2;8bfgAQy{eu?nNv9#Jmflr4GZo&C1SUJL&XOFga@d!=5vfG@+;J`349*Gy z1lzf@6X!Nl>!{I>-nl&y=zAjm!au;N5o2rH=XcqYHgo+HDnGDghF|E<@8Tx+M9tWQ zV=4cm)${uDE@4x~)FX!F44m?+JZNbZ9&GI$g6_ zDxg|&KZp{SQOJIc;Idh-V>}RBkc4+-S3mYS?Q8QmQCNMXk-UV(ZruxQHhvY6+&!+WD1DSk#(XAlLfl*>7?qVeyEVMdVbTWPmw_OVC=Kb!EC{ z9###!$VdF<7LckILv1 zPJT3BN)7_((@!gK$|<|y)+9&J$Kg5ioh7*iz$&FLF3BWSwYTv-w?{-m{%Y+5-f0I~ zq||U6WivPA3dO-m{B+9G~FqkTfesNte3LTX43h5NgH2sVCPXKs82h@>(8777r;tapt6VPu8 z6d-L^Q@SXOElY~nFA4}lAPtM*2c%J31NC&LZgrBvO&wcemQUP#Bkp8|gQ3K{V=7%@Y*vl???z*RACK%ldPit6AbeqowK|H! 
zD#6v|@=;hd$!hDXZj!G>4WIToGmJM$7#ZqLz%$~G3;?G#r8RS|2Gtcu*!$Cq2fnKL z?idKQH@g_P*>d;q%^%3NW}EbeF=3dnEoOfTns-D5&8|Kk^NlJgH+5FkJwH@SCs$OY zoK3qE20YF(ue8hIxlv2_y5UQ#wg1d&?{K;b#x@xhmnxUmL$2d4Y-yyH{;GS)E}d?_ z>vDXdYapl*s?q=H-|bMst73}U78a^M04amcCLA!$-OKXFY27^ryS`bj)b6S{2y2dL z!YLp9$I77G{@#xXc%T6}>-0C2-lJiAuY-%>%@4)DoOCW6QJh(~fJf($fn82V;Gb&l zynlptINOBM=IVMz@KfI|;*AZW41`1SM;jyeWjSPvYu&O;1QX(P4!HBnQ%e>$C7oO& z5q3pUyK0cZpu9B{=%1M*hj)dA_AguXlU0z0 zDp+12d0d>?Rb9fu`z}_oB5?NG?2)gi#&T)Jv??q)sjPdwIe{o=Xu!^)UN zSUoWXX~1hVxWMJHtbZU2~uuHO;gh0gL-Ta{&-!uPbj3V3rLc2yGxc+=-#vU2VA z@PncH^0aKo{D{jGmZh^v6*l-YuwMK_kpvmOvuCaCXtMq$!KOIU?7YBG-F0RRXO28) zfO@)Y`P7977Tf7)I`eM!e-_5Tz`%R+<|m-sVBX8%w#0b20?yl_$ybT6G)4{|H-d5h zSdV8y#QmCoS|V6U&p$o+V(UN0GTo%sR9Ex9;~&2@c7=O=nD2lBxrDM9^Uvpm|NC=a zL4>d!_^fo^`>%jhtdU>-s*S`Pb-o{56}ve&$j)VNMxac#3`Zb8+mncn8%f=JH+gG% z^f#4MqMpGD;Ns~fkekCo6u8XIc)(;YZ=qOiG~3}beU0$9rm!=Gvz10-FMk8KksGJL z{oupRvPH4;Nw?>-gG6F4KRq6x5nvU6?UVGq`C{D4S3|XyK8JBSY7={F?cV9*P66~I z#;UfF0)_QmEnjZOu5PORC~3rI(JwPkqu+N>2F;#+?k*V5>bwbctF*o!J~MtKm*#H^ zjmV(c8u0Jlk~1Ma2pPb}01@C4kn^AxN1TOobdkcIEW8Yq+`mu3f3&s%o&^X9L(62J z;8UE(Q3ab19W`5D`xQY3A+2|#E_uhH^xJ{bCJF^v1cW5k$OGb=Ts@z|vjrL9aGG6s zZgUi&gqny@eV6N5k-&4G@h;kHB&b4<)<$Q&C;rJJ{x3SM0q=&c_;!U$16oFe$oRqoxkj-jPx0*U()HfJ22To5>6jQnobudr= z|J43X<7e3oobCz$Yf=SCeX z``AT4;oXSa98DU1Lf>zrcu>BUJE1a@;6d*@;lCltd%8k4D&j6X|ANS6G9!4E(6r>f z7uJkBqfoX~r*>$RD}1{iTcDC=vI6PSsoA1>C+nN9FQ?d#k>f zd*^{PHH9{L+FYO|(Djc9C;yoKb~~R3me?rpyoes=hMvFUT@}Q&$wf6dqNyCtl~WZ4 zB2F<6D`n8(m(;n$8y2J>UXHQwpteuTs$VoGOI<<-=SS0)qJx@<+rB&P7#IXMXYUBR zk4_bw*frt0YqRodan;t>zCW6p7-d$Nd;44RW`f20B$-y+B2wC@9nqbzaB9)V2KyJi zcA98k_2)f15$}{(NSpxkmj~HWsXwg>=v~|j19TqDjFX!sCf;en@#yI(w}P}_b@U`! 
z^%(Rh^h6;AvpW_sRN&A)lI_>?R|FFw>r`RCo!c5|X9A?xACWreC^N&cMD zedVO{Cv#%|T*wFA*UG+s%rK!wqZitd__MNw?FIs+`jamGk|$0d)4KJIHv-L+}fYHNQDQ%fZ0QtL%fl<2MgQAa=(@!R99Bhe-$~56SFk-K40{|0=LI=s69=vRmnDm#vs6l=QULZ_=S02mHDYP7}oOUPW zVXH$h!QbRE4_5Zn+n0Nd1EozO4vB)|MX`NG2; zmzWwg@K^9@>ohd?d%~^AGS}X3P11{{A6NnQ6u#mM!6fsP(st@X?eT~7HXbiy)y~7{ zR{1aWD?E?>Oq_GpKX0cG#>?D*ljlwb+=`M8c0`j=AuP7BU)*+aV^_(IV_(m}_&DuUJ} zxbxPMJo7AWmwI)p;P;&#H}6OC7p#;{k^3R8R%Pk)l*{Jn^K_15e#iFERQsBfAZ+j~ zyz+!nwv*JFJ)`UPr{Au4Ag85Hh`uy1#WRn7cJR|+US%|qDDjRV>wd7uR#AZ*;Y8LW4!s=%4ib$u|~0tBAXBGULBqX0MM ze_{%VfHO5ffDDB|NE4HjUwaIi3{E}Q{R-7Y1O-V!Bp>GZe0+TD6&+w7VZ#4=vCwD8ndvruy9 za{cH8xGP?!>S>VoM^0M`g*l0T%RBbXYG*Yfl8#HA$B&W^39^TsIq+3)^Y_HJ-5r|C{pmLON*qSVK#M@*h~t3>4T^}ul(#i~7n4j0j?sKj~Rv^)7@PmuSB z2O;{=YrD#ye!{F%v6hCLxa`4ad-4QvGH+gu_wYCx_J`Q7ZPKa}4O?Q>_0Gnfh7Vp$ ze)^**{3=?U!b04_9O=~=S$O7`b-zY1nJ5yw&sDEEl}L_Z#o8X<3M!(7@2P3-KC3|ZxU6K$C%}`THhrEp5;qV&M@nUITuRStm1!oD zTq(U8Y#&e9P9B-V8jsh!(g>0v9)5ZAPPRPG(@ty(-8i>lpuv>=O}F-2af65b@1ek| z=N{*s62-2ys=3&Q@!Va#*qOi44`S+`z#S%F&$tgGA|7s0YNCW9^&s`?%evzbpb+4= zWsRA|Bzzw=1W3U49OlOza#SNBD^>VuczBgWCEt&r%O#M)1)2mHFja2vKA1yW+q@hn zt=M#$2=(VidxBJG=-m$=BNoo#agJY*)O}!Ed+pv4*J-i6fpUsTr|)T9t5-@6I=zsm zf|F>i=S368{P0N%aQctS9RaFsmfd`J$Nb+Qi8ap;{tk;s+VVG2i-*(k?MKOq3+H58 zPsDUFaezt^E&h>Q*0W*7=LP3UoX5E8RT^80>bRqmdG38!ne_cS0<`1x`PAh`S?J<& zOFhnc1@Jw`bPFyrWcPNd&7w1LD9WbLV_O2mf&IU1Im3cfMESN${j|p+eW6WL0s!kq zT?(g1oiQixEm1!5RK;64nJ-zTY+u8x=#$?HVgHW#`jzl~N%1Eh*FWIRglV<(G$xOt zsj(F(^F=%#Z4tWv=On8Q<)%S(--2H_<`krbq$URO`9K11$L+UwY6~Rre&rm&K-e(M zo+O6(>}_S`sxw9$Au&O%4qQJ11f`MewjjuW`1Ku~5FFg=)kj&WuX|roZfFQ~$RIL; zdEQYKoNR|&zHY}z?QQ<#>%-|-LeI_^7GXq9k=<&_O(CQEp@$!UP}5bQK3jq{wqz+6Og9IA^`( z-Vd;|O}fmUm=+O{>JJeEYzZxYac)d3pbLP+Y=c(}-W3$k@$nE07TKZ!#byyCeS#7G zN}qaRiKc~GxVh5j=Cm;zF&+qCPMrAL+==Mvz>f!SPSTJE3g@+^Z(mxcIpGk^h$ejvsay=JlwV(|H7S$UO<7J|k2r0P_rAsEt)pe%EN2a=y=Ndz zKGKbr&pt7moDa#Nqp~miIG=x+>bBUKIx!d4jp1?a-2hiHm-3zBMoXf8&i!M;5~g*f-s3d z=G;T`p#@aVF@=M$Ip{Zhhz<^T55gm;C9L`b*?1pexiCFYhXLBApJ(Oduk@-3Yk3dJT}~KMeG6XB 
z?S!Z?FgqZd;rhlft>3zO^?z{ndbE#Ne}(6!C#Dy$Ld4MU+Z+(hu55T43imm&9Tgc1 zGlM8_t5g#Evti@V7er?M1uB( z=Q$%4N71|fIpQq-M6a&-T@7%%nkx_yYgj{ubuP(hy7n*V1vQD8qdIN(_22K73@~!u z$P^jSP(c~OOg)6>lIV$6`CqHC5Wv_WPpJ0RtvVdce0@IS+W3A|a6S#RaWPuY<8 zO6>DV{VLy81hChZEictZFp5~%MpZlggw>f%EkWIh2qm`2ZI=wyk8Ic5Ae*JM2`?tS z8>%pV#MVitI00cSsz@jL{+wMVgzfX{jBL}tKGB(h)Z*KdhRY1u;}G;cJ~LinZ1O1n zw#PrZ$xb%2xg^avWHD<~6{mY2_rz_GQfqBMKBpxm%w4UreDZO z7w`YzI{vbMFKB55b5x97FQ#MDVj8jLR6U74+GU`?b3oMaWBAViZWdl$A9M~Nr`!oB z7%PIhN1R6{#_RtB9mH4zlI?l|mSIx*pN2{Y82n=<-Dc9}$xnomZmP*W3n6vOiN_f25t5fL^s>g|ZxB}OiXl;(tObdY71 zq_CJ3&y-m`oi0o%LMO73^WF-NP@!DbEM&3RK83?fNMLnnB(@a5L{g>33hKe|G{gJc zk<(meFu=1y4n5|GQDhidel_NvI!k~SW*8>QU4dZ5VwY?1LB@(Ff&6j|i#yn0rr1%`RFdPn<{zayOUURD0&4ldkJ;jbyO$;%tL;q~r)?Siej&hk6QZIrudKL()CpOi(f z^V$VgJi7N5?J~R#n6y`1^ggT)D%R&! z7g&kE^RoYP!XqP%389lG0lRnrl=VRg7H59LNXk!JG_9@~Ux8!M4BFRWKq_~C$4V$Z z7B}rqHeCNFB2G|6ihH&HfP=%-#tt|%h>Nx1AJh5=O#LPhNnlhbVjSk)5!CNGXP#^_ zZ))_a2AJ*Q8Zi|dT0%YfZ{(TS0JKnNbT{E|Ot?jt;qP)RV2wCAJgY9Y{fd~LLu(6v zgrrX^tp-6A62ZPUfogQBKH@BRJ>F#meg$P6B~{^X)RcUiKWOhiagc%eCbKa4m1t(>O32vY~X zI{g&&X4Zq82?+V)_xs%*BRev>FyH|gKNM&yF(Q~OHT9P(0En8J4diM_x_ViBgo&@$ z!$F#M3&v*je+h@>iF?BmiK3Q>VwO;#`$FD88JCx~6Mi$r6VLW&+jM`xX(h5*2@@o? 
zjWPc%)36xQ{ipP&^r)?}wX3{QJ2k=EY2EZ;ml-lLMa7TmCh3r0VN_iTT*t#52_1J8 zHcz`y4aHzI`QgMC()CEJ=AIGAgeew04dH}8Yh&;>lY>8s%*aX zNLa4H&>M9#F-*$@^;0^-{`m~4O{@s?6RWjZ8VmtEftQ8`2Q>$f#|CP)o_jicwQ57XoUl(^Na{v5J5g zWN@715K-Rz75h_~Fi-4s->A2jV=IgTsc%IuaPDWTJ3Ihk#w28Y*UnV!MyP)uNtUTc z)f3WoL$kV2zSS}Sx6RWihX;7NF#*f&JY>r)`|-=?K(>zl(|}!gXy%16tFOP)N@EZJ zGtvw$Cwx7hth3iv&JZR#?RXXEcUp-!J9GKBvr|NWf7|ie#y1=gaDTs>MI_C$%QgOZ zv+Qhs@ox|yIxnxgi}6^0TW|N{W2-Cliu&K5r}5;hfEV_kh}vdj1Z7;_7Jka4FL(@K zUpxYZ`JRx;Tl1*}VF+gdhkSG^iA?y6Y=^kf-g*TJ3T}CjsZ1NE)eIsj-+{lE&wPBd z&t!{_3f6%kK`yFJ=RzTVCe1w7=V-f#LV5mfH*@`sXfney5KyVF{-Ss-6^a|B-jS|Z zFdd}CZkO$solPcCo$qU8oS!=^icceEi7rz)tJsGct2zbYT6^}8e87QL3n-XB7xGj& zPGT<9T<{?|`J!%$hFgIz)Yoa;49IcL*>lE7IWiA5ec6~cX;+m@R#SGcb9?9 z;UEkBIC-XeiYD8Z_~dr&AlIB35$!inGJOb!R9e)>1<*V8kP!p$-Zbbj!lT9{X>li zKeuH5ev{p>K!2o$4qnKRe|z}4ga11$r#u_dp_AtgMvueQ$GflW-k+kB03L~nLI233 zc%Em(RMgUQmtu}A=n=Lln?5hH7%R1~n>ZxKztb0X1yXb}R^3{fSDPx3mOG8BG*N=+ z#~-<-6*)G1E`-83<~I9WHnTox2fBu)A5=vut13Z`VA5xX)SZl5+0=EIx@|(M-Z0!NHx|6lWA)}C zCI##itgtZ&b96DzcvLUZa`c)ljV9-_-&}hnw7>H7*<)xIx#+r1@vG5~K+{E2cbObyVU5nIA_(#qy(J zz3oDP>ORTK;S=&?Ri&qK!00LBm`j=NcvZp|J@UVs{F4G$3>#~o*1 z2)#`%?~j@VRl4=`qb*!eOE7RiY2m$S)$#G z)LJ!}_Cr4k6dePEd5I3*7mkr>ubmVcQJY5U4E2s=a?Q*B!$#GIBb3yB-!!HDKnP=C zVdIEnr-D+-1P%goeXpe)+23L&Nn6wA?yYStQrqpfs&(0%mw48Hig{&jS)+b^T}pNY z-sd#LXR3*~Mry{Tqkvh0~i&78+{PRo>SGVyi4+6I09)&U)Eot{_)o zDkxC3M@POlgEXqY#j$vVK=+aQ>3*6~5c4DhHVy<+4}1|ef$EVR6rdZ>(^L!mH}myf zejc3|tA5aVK_HXz^D8{Ke+;`@W083s8M;_y z3MdBbUMB;+UycvAA5GwDOEsZOIfX*W*inv6J*L|z{BNhi8ZiXC*C^b1mC}yVgg)r4 zqf94PYUfl-EBgzO{_e~icR(*5y-wNHn(wvWppp1H!s8HZP}~e!s9jVwJF&mBtGCZO zaGdRZlpSpM8L-zFcTR4)MWGz7vw>LaHelP99e#oMTw^&6-kujUhkqXM%==_|u1yjf z39ivu?bBA>n5&ys9kAaJKCSVKWz*eN^Q@wHSB?Uzt5|VH?xl;VIymyi;(Vz?CG{B! zXnMNaj`g}(v?jvACFe!^EOt!@8_NIzc%Zi|EExIu`Ki;l>#xu2W%#epJL3T_JRC-? 
z5&!;KU^6~%hSQkMmFi*yQ1Cl=X$vZ!SJ?0WpqB6#UGuv&3>`i{KL?0I{??;M|Gy!8 zi}^Jnv{Ue%2~lRoT>nx+9?8B`uDc#*6T|+lWqqHa?IW*GU z-3=;T(jX1eHPj5<-67o#Qt$n|AI~{o_{N$wYwq}8``Y`r)eV#>p57E=R7I31@Izg; zGd81=us2c9BF>-gEj+KH#_o3jU!1+Kso0JtHJ7qguy)CxzVaWLo2i-^o6ou@1VjNe zeiB}-q7r~-ZY^hx$U()2!l+#c(MBRG$nGk)_mr>;(&^?vXEppN)a=6B$bpedBvO1vEMBlwaWd=Qw9ZJH?sK4G1 z8aH3_!0o*GLa}noYI4^Z@uvo9AWk z(!J`MWo?N+cG>h=URgpj+Y(uBNC!nfr+)}7{3!}r3zxw?y12DG%NtG`v|8{G%DBFQ zZp%RPug)*_MSR(pu+QZ)B|;`!?`Zz)i{q&HV7xEj?3k=^;S!rGp#FM88Qb_Ak@x)G z!Iew|&7fU@jqg-62jZC_y$y#5Gd1f3_f9sd8U5Lx#6Nk&mOV=<;C5Vjhhc2=ZH2&< zS`0p+!8YdYp?GMk8#SZ@5q38#Rq1oq+u?SUzb1p7?*|j`yh^Qi1eqc6@L@FQW4quF zYXQnO2m5@6j1~Lx%4e@|6fab3RJ~cdcD>d92Ns}|($&a?aGFJV(NXKCxs<7hPj3M z6wh*uN+NG`(6Fa?Xvl!3!Q@QQCGJ+tp5fnB>Gq36bqqQ}A&5=nGq_n)9B1`2xKB&x zsd*)+(S$_PnCvD(HoGE>?8VbU{GU3pUCAPV&i1S^2k99LVd72R&1=?&aQ0M(LA<^Q zt{>jJf@@aJ_|(Z~kt6AzRvh24DyJpICT-O1cpt`F{BeW04PdT_mN+>07@B0xU!i4< z10?Vk2Wq$k<@Gwy$rfMFj)CS3{2gD7P4e1#2R0)6#khH`<6itLY~!sxIFdVE8pbj4 zz>?1E{x&5+lY@++g-$RhmaRW__wt%tUsZym=b66j*(-(y$4Y=6L7;R;gZ*kD8`#2qyj|TtCC}!nHY9;fI9sxFT8*eq>AwZ zFnzGQUMa`2S>_#QLi@3Hc{r6Pe4zwjM=AR(e3U(NCK^FD)qf0m6?}vYa*TIQQikxC zE@qj%6EjiFBfKp!r17nF@05D!iP^-)AW_lqpNF`moCqoV8Pd^W4yYsyZKXEr*t5*m zHm5MQCFUGw5eIlW`82`jsFazE&&m?p_*3?m#Fpgqwfnwb;5c(xdK<1_j?Yu5VYH`X zCP+&5b>JAd{!~m`r+Go5r@A*pk9a><^2>ErJq4H>fnFqtnxyXzVlQ-6N&U#ZqGrK) zd#&^L`z54kR3FS{O%x_O!RlOC%QT^oSG%l1e^FREv&XHAUXzx$et{ont>zk2#O9X?U7qUAK&c}(p5t8K5{Y3dw0qh^a&%$p~X$h z9tb}u4i3qEY{XrKxsvi)*4iJ4XaR*V>TDoJ*|}TalWlmY+#%=K(4gF>k7!doJW0u! 
z5ruy+Y%gD5-^P}3p!C1(>QWqJC-{B0EbAWmqKFN_CYVyAlFhVyO}#p8 zEk6nI+9-OF30@gMbNo7>g+MSmNr0U0_D)(rsEii{G^Yn=f{>6fgE!6iZmLL0YdE0l zob>na-&4Y${)+l<^`Sl;YIoIredP|@YX_(~b@lbN0k8g}0Z${mx>$;;s3V1+ZsPzUlXPA{gY5WZy`37pXnw6 z(r!YhaFfp@qQzLyIKaII6APpbu_^Gu=5jsJ!e8~Cd!Mjo*m^QsjvS6v5mwuKXcB!F zk|xkDVSyEZ;{p{6-(&g%`B6Xx#iu{qZb6N^nER1^@shRX>Gh3G?BFUfyx8CEBYHb` z1H)R*BpUU|X<$OpI84pGXJyYDyr%Sgv3T{&{rZKv;?YdNo%03hw%QLrD05wAKR&_9 zaa@do!PmG{v&W=$d;dxF59(kNc_N9_{vRq91p&I9eTzb=XEILV)D)yI`3TIP6C1HJlu{4M*x8uFI4TOh`Dkg>-t0I`2@lQr|+#72YXMYiC?7+{pYxstzACt&3Z{mFO34^0^SSakreFVC5 zN4w%4j)Nw=HnfOf$O9(#XP|!r#`rWh)ZT@?3z?>2fIsBJild)Qz)^%pkOR z!%Er05}^eMlsJMnHSN8wv`z8JLdOh zA~$RYx$yQTv{(+EitpU+o#3AB3L&L-s7}kAkRco8plBj4+dTW-dl!P~3-q<%_XHaZl$wbON;rvHr*Np`OZn=N&ZLRUvGDJqhW7#yi~3D+G!) zL2VfcUTw$xT_(fu&n-ruzxJ^fDpbj}X32Cr-G3}ZW|!Z+RolGZV#vxq2w65*-t@qp zhXM2RC-o=1><**fcQ{^m_(drS+G;PBAnA3IBo-|D>>IE z9SWtr8l+H65=IasV1`>*6jbj#s#8JJ4Z1w;!u_5BxqoWmArQq=|Mau)AySEwr7pMP zycr}3_t4@570*fyY^RXk77~iyQt{uWLr6!BME$`MyiGS8H`3mp%&^F4>13E6UQ)$6 z-g_~nOyN>E2m>ieG%l)IUR;=j1KFG5w7jzExU005m`d-%cR_U&yka+H(Y9^`_++_% zR{HnKqa6=EM~TD`5*bxYO_ZU%BjNcXg`GEQj>H$LjC@YAF9Ru{fg)?88L?Y06NUj8 z(WZUY&aYvasq8vWvmdNQv3DQ&X=a`A@E4XK3z$S~pkng7TYi9dcv{{$g)<`nVsvbC zK6uAEeEHX7NVaTce(1Y$mgIptF^$x00%F5dYRz1rNpxQ6$<_LwMr0!1(CccmSD#_9 z^8RCHZs%O$bZ8GElvVhpVTSy@@0a*don6!23pKiW2Vm(qr)vB}oS1euMvLj{Cpl6O0ehnA(!bW{nJ9Z zi6rs4TWWpW`vpogy)6ol#>@v)&(Wou_PKv2l86&GU(ZtD`j1WjEk3r&ca~qqXL!sB zeUE#b3DjF2JQ~s*sqAogQ-kTrA`K2poxfLj$K-TWKFtdMF<3pU>q)>7E380%eNap+ zVXW67oP1YCDb{;hOL|MtbqLq5RO(I@WzBZeB1ai?1fzFA$z?-yneQ_{5ZO~2i zy{kBC01PZ2mxUuyBOMH#g=6fDZJmk9)om{cmLV(Ok$@AnCYrB-@d71@i;ZX`rf(`g z;8wPOE=)MBKO2p?Uqpy~&1HGY2GRlX@DFR@;o;w?)&9#!DeT=!TpgiF^a}X^;O>y1lbq&7@^$a^m(wTT4iixC5V0 zERl#QHijJi@>pL&Ci18btLkW+^+n-rKiVElO4w*| z;y6EEC{7bU1d9nD!ih;-)!N01_;bEXjNgmJYBTY}mU)*pI~GqaHh*LT ztLkj-yA~>)>woGMo&j`ozGCQ^dy?4eJJs8ee;zok&-AjV=`r;FS5AD6s3y`W^a4K< zEeS~&1x^Mm3aQqD38c4W$dg!M){}ZkMD>&hwW5HgjjoG2f_+q8!}#SYg|l}uutli- 
z&Or9m7ZZ2c5Q@7lm`K7y>{WZDkhM@1m9?9eI45r+{)na2qg3f|QK)XZapr&ql_e)}K$W+ZMqx!mNjJBRx@KUmitVwUOgB~FpzIbFiA|vXl6tW? zRKO|{lOQUsVTcp?y&ZvhfbQKo!~6!@UFhkSIJ5F9%bjt_O6ux>N|8-Ui#sWuR|32V zAmiUff!k3N>(jb~oH9Qe%zWO$aw95%4LYRO5c)i88AI8)@@6M$twgK{zG9PyXvsh~ zoPYXC+9!+8Xpo=@?q}sS&0gbhp1u%AR|+w_Y}xFH`~4I*M3_NDg4CG? z{^YU(@Wt>b|6$8!=5$OrZM?_G;X~cd^(Vv6@g|bs&4JXE@q)~TTro2E2o2t1%k>s> z0AlxB^~cWEQiDBzgxDkT$glYs*b<=%dVDZmwFCv6Ag3v;7DmH))xV4<(uQ@_;i<(j z0I8%zXF?myLD}_)u}`=NNqdtvp=AIeI{tEjO`WxorfTBVIaTo@rn+bUM?jNVqO`sYTXH~7m*#|EK9SL8C~;4%R|)?OO!2kg zD9Kf4teav8EtKQjF07zfGh<@e9iNwpI~&HR+%UreDlY5_HhJKjj5>BDxWYD^8~i#y zy%K(*9eEINV8nf z-Y$}_48*kot*HWM#>l7)-k^Wo&rhoN18<)zzoIV_sBy_o2PoJve?wz_Nvc^Kw{bib z#`-hWjEz}BH$6hha+shBM}3;IBJY>m@t19Tpc10FaqPMLOy$MolspY6CB$AQhbYa1 zf@(42fOv?nCKQO7&vSE#g%?fZ?hCVF*&n*vviubK9SzeD@;!D!}$C6vg|ouDWYpV-fy-= z`7s8N=6l7^Pe(iMWDL$KU3mSuL;dX4v76H(^|>SUwsyc835=8HRhQ@9?g&*jExEPFIt|U95*!_`k9%RcJQ2_D=y!EEP!Inr?0P7`>MVCf z#%Ih)xl0F4x+7d=GnO*Y2=KrVo9P-wAJZgnL5@EH6s1clogjF@87=Gs-rHGD6m0D7 za7I2<9;ATee=?=kML$O}?`dxUFs`v|?po{uNA`LuA|MM7F!mJvld6}0&s!4nQl(1w z-{4HP*;8zipUG3zNcPSd5TaHAHLa-el0dY82Pf-_{P$KZW(?+W1DIQ`S-E9{tLd1l z3v+d8<+uKS&V0RF&lg5SXinnHtKKm6>%BsjWB}E{!NDQKpvD-^(DyTxk?;?>FOn9In{ws<^z~Zw0R#8T*qZ z$Re(n(nq)alfScxsm^rpv)ksRVfKdm6Aj2BZf$SHeSE+(=Wju?C2%<;&!8NtSriv2 zU$Py(GWFV<>GmV=?m(-jg}!*Z*baAC|9FB!7?T1Y^hYms&oDk!uO2TotWE6?%t!4{ zKTOS|XKOwJyjn6{>oys)(DLMVZ1BhkFVGQjo}IgK-pS?kvnI%x&Lr82-=tTIPQtbU ze(Ps%rrbyXtBABVjZEh$!3n$zfhE&@w7b%?z&rUWY1fTZq4rl9ij^y5tP^j-2#;5I zZvQiy=Lx#MO!o5DyZ)bdBl79E5E(+dLinmef~Y)>C)Q-_oMQ$EzVwfHj>j_aUyYrcp2<7ZnQX2%-f{xi+$d2 z@U58SQLx8bh2c~2$Eq3*-;HDbh%2 zPfGd1r_Kg`$O7{?JoG0%NqHu4YaztQ)#WIMTQa@}m}!TMDSPRgsk7N_8h@zLC&>%# z=s#gsPB~6Qx!!->5ZpQc>PDm)3KW?jc13}wn4B@E^7jc+JhC+q<140L5W6i-F%^Go z|3wjXxIin!9s6k1V;*cR@)HnZcy$f>6-a>cLLL*rp^S6~&&+6JL566E&X^Qa4U!)r z8XHM*><=NmpY*Vni_13rG(AXKU{;{8OsC{Q-FQRcfCqb&a2hGr*G@|(TA*ax7_;$s zC9>tHal5FD5wUG}R&h_YXf#gTxF*y)2)-Jq^ML?0Fkbz*i>?6_v(kSIrh-14)4vH9*6s-?bz 
z!+^sxh|>No3%j`avN-j{_bN-|9p)3iaEOYP1wvSh8Zc`SF4JaW(g?&G&)0GQ(2ooK ziL;?1)~qVlo_^kt58AZ$YUd1 zIwM}l{d%Vssk(CDf9+M=E<`Sd#2^clZ-!^d--Lf0XfgH)RB5y%*te3iTi6@udD}i? zH3IZxrDn$ij5^Aqff}%rJ?-~{cVjH?RW(qZ(?Zhzvv?DK6A!9&A|kfZv&b7Xu+<;% zLW^yw2A9o(Z6Q3_P+cTf>R~5C&M6+=B!C@zydw7Zqhz%Gc8J2(`Pe^{2kNjc%9IS@ zm=Z3O{)lB$+!TG9d1uw?7)PDzNz_*qaygGTrV{wKothPGflo#oZ|lUnZ)Cy_aR!=kRPS%q;se+L8lBQM)$=xwbi`|jg?r=6RB`HbQ>h8j|ZOH7VXF7zeQsg4LO)<6~VR3wH!bki}v{;Zj0Yp z1uLYu@+Mo_j zV|~i|-`9r2-oQ#GUwpuG&2-O`cT91vtS+b)27c-)db(}unys0rt*u>Xv;#sg*P5dY zVzvt)w~e_fgGa87-yG6&DI9uR*&b`P7LW667=#A;`WRdfxe(EL)%(?QZLX0VaXlH$ z$AACmxVdpH$FjN|{IAGukpGXj^g~ze1eju@30{*4m^Hc$Nkj^f!!d8Z2b|5x=5|7* zRvNFwJ^%EJTg_A;lL=M_!M=LudFgTevDynuNtl1nrjc63&us!MoNuZkETIy?sf+^i zzWu!I)P47~ZnAga!$_Zt>$JBZ%avZwp%U7hYwKWA82|+nguV4p8+S2m&2c_f;eLO9 zfCY@r# z#k^jjZ)~3u?D6dsyhpIipr@aVvk1G$b!$Zv-OcDtWKuRJI@NGGuk1q4JPF_W8Oe5DP147x0;454j*{BoRjsJ%N7iBIPm1fJ*^OFQ}k5jmEPJ4i*XlLZmTwAE=eI z;9XhL8`tyj$@WvuY}3U(}ZRXJKjAs_lNbSgd{wp z2LAHICkwcdzU2@&*ZGS$gduaHU@S&m`<94C;{BW@i^i@By20~Z(8c0(QASBK4U=lG z3Q*!sTN7-IsgnK6D~>W=Rw%j|U|ICyCQNOa;rF!a{8ju>>q-ynrF9W<#wQ@qR`iq8 zVEuTBFZV&nKsONIeDPCi=r%){kZw}kH0+XqOnXTGOFzb!>^wTqT+K`!5z=5vZ`wwh zSFB3T79CQ5>Uhfbp)f+U3iB6oBE6d6H#I9?^|SX{$fSiSryf2f-lUGKxjr) zS)gA!*Q2wnE1gdAOI%;849O?b{XE{6Z?$X^uZj=pG zY2k$DM4FisLnfMjwmHgZ4M6oK{geS&@Z$*WXI-{_DhPrilLVMXshY+&acgA+@)#_tVNY#znh;sVM7-B*p{?ns-#+KH3*qeLen14kqonKjk%&! zB~docVl@#r=yk>a54Uzwwm8LTvTw~=ADCB8IhCpkeA&Uq%(Pz%s4+zxdWR(OBD|8+ z*1Ecis!9s2wk}1S)ulQ?PM@4v^G&yE*3M9J^a)t0Gur}9Chx~XF8aPZ`qnbR)h;p! z1zc3XCH_`I=KPwdznP1aG%612>hMB7el&@1IISMIc_PvA(SvG)_KPqb$*F=WAoDX{ z$~}IJ74g(GlY!hD2Zhqt2fj)k9eipWujJ{fZ*ey)FRQOUYjaR${F!) z%muGUn>`@`z*7+9nwGQe(a2osXKJ8+B?&jh#7KEG?>0FU==T{gy`A)LPuTTUUGP!4 z1)_zlcJ=>oiEIK8QdB7Czx``QFs(O_JXgx0fWD>@E!!ytl97^@d3mk_mM-O7&R5U< zWnz^Kz6bQ0L(Xsq`}r!$G{97&ZPX8}qxqG2W%yppi43oHhEw?!E(#JPqN@zrOqTvw z-vcJIT9-Xlq#alS^?dut|4d~8IjzgC65x~_u;1;|E#&BK~(%9i(dT z?Uz?3nP=gFxM%4F@PhWjowXbvN$vay?KcT>6Ecjtm)|CaU>UtnJ3=<~hAY-OvdN+mvO??) 
zerPMG@tdqC^A@pMWyaye|6Ge%k&tj8ive*FEw-2NP>Tq8TSo)ucXY2=hZLHl z%hWB+^#ELZ6YT`ycW)aU6FeCWsR?~~U{x1O>fE};JT`D~FeQ_Lp6-%Dj98GbI0jgs zL^!X2XHq)A=6tR(27r>+tIp3O*2m?*fkWQ7{cQ`~c40$ctw_8FEA!aLrMyHTg`h+G zML|a~y~9O+W@pN^`)dwYQc)z12sPlzfTC^(mYQyW3x_({pTb$Jlm(h{EXIW zdYzo(AJ&%!>a7q?dT9RefYZLbv6uxNa%^riSkt20@}DM&gf((2X39%;TY-5~&i;Wo zUT=vqY(Tf3d$-IIw3)sKIvp$_B*4xBp@uN2O!^@~IhHqqBFOR$L*z?e9ZhFaPo6QU z?bFPiPZALn zm+Q!A4ZB!HM7xKls@6iP#UeP3NA|#70}uniCIjV3z>Ia{q=ZR+J5RNo4m1htCkU!l z3<82FBkjl}HbUPF>M?k$0pX$&B}JS|IR*8euf+v!4^|{jfQlNH7SR z4((?%Z<~5!nlxyttbP75tOFTnL8FAYp4oq|j09;1T`;;4Glg?}u_B{x)ft!4QPvs$ zN06RtGuepmNTMZq(zk$HTt-hv=+gA)bl&xVLZoc~(PN zUF;~P!_$0HYs8=yjaI}Y8OXxk3puA|4$4(dP=kGW#c|6(>tjyfZoD6Y#skGE+ zeZl`rp%WdONO-8IimDs`?g5MG8_BnpCdB?1^oq))@UoOMH{ZZNK;pY)El`eC+Oz~c z{f+KF7=_L+NLrOiunFsjT~&0wkfE-(vEx~+GE(+pm-X98nWWXg24Z_FU)GI?m{3tK zXJ$BsmEBnAP$vwwQ)JD|qAFJ#Vk}Jfz@sKb1tT{Gl-}Ac*Ts*LyL@9xK$OAvbFxLA z{%-Mb+St}~vX_uSQaRQ(erR_Q=^RxSPgI@~=_HF(|Mr_wDr8W2DZ0rsAbvQjZ#&|kq>=3zThR0)>_dP4QWd8p?6$NiO zbcsYQ3yytD`-aqxHe^O|v-;)9xLbD_T=Cu1HcI82 z1&DQcI*^cZhkFfIzf<@TkR}`(!0=2!@(Pg%?fBWi_3uDom%UM1JqGyK9`Guea1b|4 zhqjuhrbPh?i7CW_nv)zvwghr}5T0zKH8`p=kuw4>Hq!1~)egg;GU#Tk6qY^!4B`o)X{;^A>|9S_RwP{gCBu=UUa!A}S>a!0q@XfPKC8YRx*}Y6Zk4uxZC{ zY@4v{a^qnx*)->3RYqo2>}i`loYJZEsd{@Qu=c@kr>Wtuzrom1(h<`!!vq5gBVRFo z>cqmp`>zO2sL?Dh;K(xGz{Mrj{XfP<(yB?V5j#52bjuNlw?7vKJ0k#$SP;$~@S>`$ z->7a+{ilHWuPwuJ4zeW*1|Fl8rpw$F>r^)7usIz!jH4$3k}9mI6E{bdc7d>}%ai)m zP7GMJaHQPfJK$$>CdS8Ulm3zm0Bb{g+U?Ov+OqmBY>fXf_#1htChq_fS!4XDht4Sj zgUglPZ^-HkrqEgnKa`~d3mf#(Rdz1e3V1vKYuD!4S@naDk=G7kEVU%nYOC9{`jiJk zZ{um#t3Oi3h_*8E#lM>)Tq>$6FJDyft1dX>0Pm&xr{wFyxZ;NoLAklq`T6} zosVPRZvGeG?BeHZ+>S@rBc|^id;h+M%0!a~=JlC2utVk^amD;&rH61s?zP$Z_Dt$I z;;h@0qkTelrbZW2Y$+JDh|aM*BMZZ7*buLwvWT;ewSQOq)_eN4POT?q&vV zoY>Tk*o;2&J;xeu=YLC)(PAwzsuC=H$+nYGM}a9>OXvRI8{2;-jJK8kn6%4j1)fw` z4;-?eH}GWNaw*uJqfPQme(DS7)E0E=>28R$v|8#tG7IRiLimpDAbkjKecME)UCW&P z>waIr?YbYC3%c2n%gm0od`lKA*cDkr+4bIbLt!AwAm)3lchlg&6@pJzZ`RgHLO&ovuY%7H 
z8gcp<6n^F3)BOI3y=f*#pc*swliEGnEBZj9>gN&HsNh7}UfHLx!4A|Em;2r%dYiHD zHUz?dUxfay5f-0(N_EQHW#0An7@k8@%a%_qcOv0^$DjPewnr5oYud^7k|Jimvmwvv zQWf!SWp`GSkqm57a{}svx8$uRzr6Wl!oN@U4}}nXT%-cc#){xPp%#VW?)zL^(}`9e2fm{PLIY&X{+hvQ zkoWC0$>TLetj|B;*reyAX-n`#=-@uD7BnF@6sX|Le^htivz;V!+h_HkXpTLSSa@?c zTb&?+NhKXIc5;5YthQx5z*-0!cAK(eoi76R_{Q+2VNe0k8q-~P-DWvlYr4P;a9rN3 z!i+i4YPv#S2Ih@fVU%#xSUF*9BAk@HI6bOz8Qooe7bmRgVsPm+HVG~PFmBsfI$;6f zm6%Z>X{(&|zno(qzu!b>VQn{`TuM~&(&)$otw4LI=(p2he}89!|P`nr;o0N0Tdnar1Rn}yIqMx3#^PUNpHl9_(Z z66lE~vQ_fF1D|By`bN<|?ru%2jpH=>P~F-ZMG}&0ZFLZ4UK~psDW`k9+1{P{LJ?SY zb-UeH;U~qenNd!HrTv*f;6Yip})4+I?9PSxMj@BLWP)PsZ$atW6>a$(W$I_JP)v!+Dd_1M?21063(brO^)O z!SJk~K~c9t583Q~^j(9sKsbtvS=z*%>U-T~Vh&8dPwtRa74TPo@+SJu3Uq3&Xx8+}KBpigm#A(*SMw^@C2T$8(6d^h4RpxkMMG`2MFZ;#RLCaphrAmsCUcEAZqJKEODJvlrz zp`U+Rs+y;V>DN~z+(DX4S>|t+=anuf8^EPO^?Py{tF|&<5mfDTes7j>1P*zuA-9W! zyWl*1-O&6`M7iC*+o-$p-OU6q0f^SHo>?i5JHlgUjupInzySxh-bK5ovq$3MKtO>{ zvi@BF7;tYrhL@s`k7mwRv|X=Iw zfwXChzhFA|iU*u2QvX~nX2glWe}{Vmg#$5L;zg{#T@(DgT-no9#!uBoC?60I5csLj z29oG!NuTB~?>=En0zK-cqnpDxqZle3evj@*6$}gvKx^r40O`t$Ld;1hVSDPs9AjxTh}7csgJb}zmr105+ZbPEYp3?El zQ8T3+iYP`=`;Z5r{YSW-#;&M=Y=_O7v*pO8SFA0WJvvc;%8zw-1$7|AWI5N#@H{a| z#C~1Zucu!&1}XX)c^8DtP{ma!0f)(NmxVy%U=zabngP_xee!YWXV+mvDIXT_J zApw%AT$$*mB2ekg4M_uhTh%l&r^z$v6Ekhy>N6t-S*APYyWB34M}9>Jx_PxuWYfuQ z4(lJ419k|-PI+}_CoNf9<8tC9@GbJn$7REME8oV;0u=Dc;x6nZY^so38Q8DwNFWF0 z&C?p69n5LUn0FUyO(OC9AvWiRuf1eUIycF5qETCt?_>t^!Vr}Ma0>5I5MK0Y*Mt*fW%A0$3 z(|Apa0GXIc+Af~cPT@C(eP*)lBQ0l;ll+W#*zd)Q#ABZ?7?y~@%7zcQ^dza2kk$vsX}$@fhMgX&mR6!H{_|C+sWh? 
zPx%IE0uGYUaZJd?()snDQ!Iy#VKN0Q=F?vhHkSiX71cux8Au|ak)l8B zKepJ6Sl=P51<7IXe}-$f+_A0pb|EuM7#$On)@_+knc91QiRG;#Ljd*avdtIZ1X!=( zx1GbA#?)kWrJ{qkeorY^vL#<7N1^;o0NOO2$l{4he%Ge~gYk%ia-y;^`*XsFF3k~L z->iq|7^eee;i{~H@-&>kg+cF(viTExQN3d^j%k0@nn{uMu#}*GDx3)4*iRXB`uN80 z>e6YY`E%K?`%D{7?OGA*t}ZN0?n=ja$B|MIQ1P(@2CZF4Qx|3o;UC>cPDL0$9;d-~ zf7e?8EeS+*fbmIy)0CWuoVW(l?LS{H&N7xMQm+8c+)DpShR9))E4hv5H~O)1yE=vn zfq91cUYn_-t>?ebge_8FCLGsld=|bAnc`~wa(pr)=zJ4rL17(UnOB%14aS(oDL6ct zk&p8|E8wWu0;<@R11s#e*)yf_O}*IFSrwxn48m!i@r_g z;Z!(uDKdDBMv2A8|A9lD8`#|JDGP(rUOQskhRTBV-V-#wrIedUk|jt<-Yh>N<|}mK zSM-Lt9lDfuH6a<_w3vxD?JkI+?+Q=vb(6^m2i45Az-^2E&DiEz&YHSviC1BEdzq^e zsG6R6W&pW+Py?kxd-NDPHvI`0diGiF-cpY+LhQ}ghbB4!rR{I@d z3}SZsIE4a^E%d1Kwl=UXyeP$#?X}ix50hQM6h(0Suw6iCj(f$OI) z+bS@Y;EAp-rC@w!!eN|oi)cSJX(3FvaUQt8a>@ zMEHNyV;{U3v_e{H$=z*-r_V)BgJcLj zy*jN%=nXO!Z#z#=Ev00269IjJI$*=%^)NL7K|6k z+LoJ^g2uj^9ecB2Wx;y-VQhv)fE=~A*_m7?v)WEXWGPMi-Y)3H zR*!k>J9d5#fN)js{iMk>450H9D8WLycu_x!A~X(A6s)CaSY=&_a?KRKg%^x%lF16c z9Cm6=nzQeD6rRY}X0<80kPK&-<H7lCcuSa6A-m5C`Lhs>I!U6F z@OU{`68@yy@#pI_5P~1mc+bRvnn_>mB8i9>nRgsTTw}-MX6@MOM6spO^(9qU&^9A@ z|K|Y-|D$zm&U5T_W<1LB&i*ouN@7(*vDmpW`TE+eIE?R%pLcB1I=f+2v_5rC|Y0>X-ol-n*Oig(~cG z8kW}xN!YUB(WlVv0ATI}ZThg&?E><@9czIzrEX~X2fL$Dse6%|o#r`NLcsID3nkSH z>p;daa|ARxa-ihMrq^T9VOX9H(WH=^Y#lXSuOd>I|H9>w7^LM!PHz`8 zrJ(<>d_E4f;fu41lv13jZqVH)Cic0kfqa6NIa4o%7b4IL>E769NH-81BVc1!{jBYz zkvBs~n(CCa!#6e#v0`~!{ z=#3GeL%|h8VYHHhO>9Av!_a378mK zZ=MQ<4MyD3xkHC7C;)qcA`rGdZ0aI%D(4Z7Bcym$G#+uvdMq}io-Yti_oDPxsgn=S z*K%4%%RbicqWrk{XE9Mn2A)mG?S{9D@Q8DJH!g;mm2G~d9}q}(i0FFZp`q`x z^YB<7sc%qMSaE1T-r1s&%2F}Yi4>nq&nxDDr13pBEhyPQ{g?gLOa*&78~NhHLq@&M z4zpAhgY@6NLbEU^Dr!`un&NwXX)zN*s+cXj#Egt+UQ1REg~}Ez`X-y7|CN8KVVLo6 z%ywEjtCho46^53hfeAb$N!c+d$4-A#~WxAM-eNdqmozYGaky9CYFFC`fN zv^OA+yiD8U+v7164!>bkz9cRC;qS^f+YsT8&W7tEvseo5 z5dc}}cr|7U{6D(R`YXydY}e8`fOL1~kPck}N{4heNDkdy(uj0ROLsR6NF&`Hf^-YQ ze%|l5{jI&%;t!a$W_a%By6*El4&IHk^RS_M%F%Aer!|9~;A-SbtuYlvX8XuoEs8}U zDKoNuAxZ8b*OAUi*UJqZu#T$;+aSL_8O|ol>@Wk;M12!nZSXK5{=$nDg{)Q==v!OO 
ziA9dP(PM;f&}C16>4@b4J#eD|q@$NB_qbBT1KR3C)Qaj zuv1#D31{T^yc3LU7#sO?vM z(J9oif4yy82#blM$2SU3uT6;p>4zf<9rZ8L{-(|eCrk9Q$qCt^UoIi15m^blIjZ^~ z+K$8$`HoiiFZZ6k!G#(5ZB|TZ$EMuU-8-DRT_6U1=rD-lA5mFYq57?zqF4}RPRT$Q z-?ncD$J)^d7^RG^HT=}ln-^U$$hPbvKM72y`2#t2C>9(0f8b?O>5Gbd-M<-iu<&ti zWa6pL|3KL@1xn}{6uKy?w5;Y>yTA#%kN90@LGRng3q{l}Y;VMs_FL(Pq&G+~53Jp( z<_KB=7To>#P>2!f7uFfHAA8iS7$Nw`RAbov+-*z*kC>ty_e|ow@UZO_E!IP}39rJ#(G2IFyIojq$b3l%lngx*ApQni% zDTF8;x`qp9)60*5y)3d~AnAaWNT}&-uIj_8<%4tM@j8lVuKnGL&75;^r*>3rzwcbaM6K_hKS+hcjniG?x? zs%3aM>Ez>oe-LYx??xMw8FR}Mj3$xQyT+R}9e*CKSAi-|4mCmJEgmV0!LiNl2`$v2 z&RI0D{kBij?3?*}AB57h^{Q=h?T~f=&XrCS9paGh!S+@z7tT%xj<6^D@XqbYFt5RP(yNBQ63tiYfj_dd>ve>Y2 z&x5Tc*wi-aYEVde&Pd#9_&D=p2@jXw5<0yuI889TjR9=+;kOPGSl}_D2|y>R`%UBO z7JfKh2B5?OF4ek!ot5Fu4>#;;H%`K_Q#qC&KKE}!!uvBz(=V1Jy~eYL zV3r0ti9y+G5DTm5`uEFqBcli1PzoYkAtL(kMbtmvm@VTHt~#3i_`201B3FF-tL8fx zX?Wf+o000%0dRd-Rz923_T4Z0$M`@S-cYMw3H;(D)w^o5{ zb&+DQO`;YK7S>qOV+g>Vg$YS?>|1nQ#M4GR3S6=&0`zkFS@DckAizBaaJG8ZA;kv% z-H@N>0CyC$QhW~!tR%vNdAXcy^+J~AI9{YDEdyH1D+fPv0EcGyLmodPrhp6{! 
zd!w8RgedNw@lP)zmju53s=N8mb)_U<^MoVcE)E)EtpH$ja+7dz;A;-npg+T)=HNIp-bLt>7a9eF_(pK`Y9Z0Ky}{ zj#sUHaZ*Y2Q5<30+dkB0c6bEDs`1BqVuJr=11Jb@@2dpT#Mm`8vfe{`$=*Y%?tyOJtXl+E)=SHH-=k^62XVpx< z#;EIJPDADQ-~R`g1Lyvqy{3|0_sB+O9DdJikGzDli@^xM-L#KWgDi1{Xs z$nzbj^vW;R-l!zK0H6(;(<3X3K^%MsZ8_-ISj*gOAw_oH@C|ZEt;xbuSzx{#px_O^jp;Z5hVju{`p||d*b>-7< zOnnEJZqhgQ;Tg@;V3{R_Vf*g(uV~5~kxg*dX51-BVlKJJrHsDaHNuiob9=Y2dj^A_Lmopy#88}DYKDp zOw+U0n3Ply91fp*sbt6X=}KR0H)06!y4*v4#Xa*4CGT50YZ0`(3>8-TGwxp~U2+M1 z`9C$lelYgffE^OW^nc{ANiTHK2(LoPF@e_CnPJt}x{|WRJ?=T%HDch5?JwlWGW|Qw zeX-f`zBhu2JAP zP5z+EIP_Kv#a7id?=C^2LEyDgZ}eyD$j0aKsA@K1wdp9Miw;7NDj4ayfd z+-~Qwz@&g>eCfEw7W^LC-TSsj)vam6Je6pZphhg_|2NywlADQ zTu*)(mSf6hxn9AKE@gkj^B#qB!LLg#0E6$_O*e%??dMsc&K4KpT*FrFYy5;fva(vQ z(Zc|*fIck0b~FA3Cj-RSbqr^ElP}{RN4n9=$5q&`8Gp9nhOiS=M{Op&i zUu*|3I)cm*;?Sa%Ce7i^f79*$!mg6jl)N!Uf%O25c}r&YC}}lQa`dZRK4YK+t8})b zvv&aAbL)f~q7B)p{G)?G+8{dFyVFnqjhS5eSLcwWOnH5t{jo(ix*Yls`3C1sb_G)& z!(mI{F1$6mdZ%8|osie*^7kltwy;_lM(LRI47&K~sHPu!%aN2unKPv^+ZK24a4QlA zbQ7%z)gJW#GiX+1$i?1Z=+GGeK(uDF`FLxeeAx8)57K@eiS6+P>OVu7~|3G>~T;w9NKJ1MPM zwEsZU2|2y#{v=kJ;Qw02320wAqU=9P8;rH)@mbebDje`F?o)ov1TR72LEjpK>Y4&u z%ts|O9^;PWoAZBwmgjQVZizT^WV8U^Vv+I$*C&%~`J)P&B8>S<1pwi`TNKM)WN|<( z%B0d)3&iy^ypc$ofxGE53y@mXt@k8&PB1(in0{rTGDy+#V~NHF>PjSng231+U_4C4=jo3Ww=JK7KKK6U*X+(hm*?8@ z4R1g!MJ6P(vFlkRvaDnX??*1eWhH&E+kl>y?oJSsy~peS9GIaY4Ytb??D=<^Z*Njb zonl_)?f$Rcshi8i(ZRgy*%3X!aH`_zmZWatbIYM?8lGE+v;#sEfsy`2V`kQp2fpz> zRckBxkN~|^2i%ZL)tz6b_JVt1Mu!w=n`ykNM1j+Ihj20(9`o@Z%8wLRHRGT<4>=3o_|Dw*k4q+c=9@*=R=I( z>)VsX%hQ%kJ^t5DDHEkbo*L`bW(V{t>+$qpG5^2$rUMb4;RADB_Zu;jIl|`0bLB!- zhm$$Lo%lR>`L&XK^nYiRudrF43zv%MN5p-C3_F6NP`7quDlYSM9?o@Ngnw^IF6xGTxS1^P*ta`D4DTR*2VoANZBCyl`74U;Cjzo`yw_fvo&wZz>qm#R|GyF#88 z+PR0rO8%DFGt7?pcxUa+o;7asnWW^g5x%-DVtD|NhV>gj>~9x+bHFvJeIYxoXaiV6 zf&IN{T~sZQbKQ1f%^B9BQkzNYfZMfJBX1%ZT82A5=Afxfas6}nx~9Py{$rZRitiA;R(;L6UA##PS$Y&{&*SKHSn#tnZ_5qp!04or})jOI~^4GoFXg zxhG^>*8rEG0}xCCk-&w3g=kWJV z@~}W!f_csI;e#8QAaZEw_+Xi?ik;4JE-=@2Bw!;eW=AFQ<^hl-74zq0S1|OB{6R4z 
zINl#+tM8yn^}No?t4tST&S?eHoyuPk&SfB#18kd#eRpaUWgn5!EWs|#zv^fbKG42i zS_`o8-Ei@GGo614Jg1nuQ=cQ{^gf%vN#phE(Lg@ZGq<3Oh!b#sBEUR>WQ0zU=ZQN` z7$7scR<+9tM#4=6xEQUj{}D>?2eH~YWL#xgUJ04Cuw96fUbJ;4xW1)o$6<*?H6&s5 z;r~^*K)ZY7gVZF5)`36yxz#!%$LhiM5xI!m$h=<}U`1}6mx0uK^rXe`(5+$ivV_+J ztEL6Oj2FLAb|FaTlYI9v*(svkKa_}8z$>^tCgVEYY;$}m&LsdSOw0oD(U9#}n~3kJ zuA?o?O5K_n&ja`iW2vsBho|!4O?59A0Qv(RZM)(vnY{I_dfN*F#KnXqvSCfrxICk zG?$S~U#`puR`TZV7IG0)37LL%+@_%*(|Oi`Z0gBJOe4-i{%Ht}4T%{-8udyb&3A0} z#1rE(;MB3J4&N>=4y=V6`eKTi)iqAg{TIu`zFH1;NjaJ-n6L|fWR_aza)zm^ttp8a z@G{Y(`?}9l4BXlJmS7@<4L;V5Otk=|ZGdGak)28yGxteNkNg)eCg0>pk(p-LPb{Gr z3|uv>^vW|+kPL@=d?zCQA=5}e=N!XmAm5P?A%VQF(bx9a1cK7hK-Kh3i1eVr&@v>j zKNuikK=+@aAaVawGlFX4W`iL@FV$Ap>+kJopo_MAWmBMy>>NG0Jw{l`T`x%vhyC`e@{#`XWk><`pig~O1Epc}jlvLc$S5304-8;}7Z=A&5pa;2HQ1f&tNWxRceyrD& zjE5}9I>EEgv?6c*u41*a!!fvZtRCo?OKfJz5|V3;JSDSXHU+mMQx<`xKA`k0#j zJ2xMT_6cY>@?4m~C94q88i~GIxWQ${K&S^NE-z5e={KEF0CooT#W4{5yWbGT_X4J6 zF^Nf8rKV*X0B9q}IH>=ZI)rr;1m>gzb7~QM`}X4m!kdP*YvE2zRtgLtiYDZF-`M59 zdNLANt7vDQ>^7}X&npQW?fa`;94{P%)Ojv!&M*#h>e$D>s-N9jt$Efv!7>hi&2>FZhXbJn0l}7qF%jDE-ms=$Rtfx^CJoAqdK|TOaOV9c z^Tr4;zuYRZm1;Y_`#zS8si04~D5jO}4#@SS@9GYiy3MZe@Q``?``x903xQPsa9F*{ zVqKf^B7epssypZnW0-rtiLI-oi9&r1O?UB&aNGUnPfyJj);5o3*MOYlufAhW^`jmP zt^uw_@-;G()(SNV`V0C1gJYK$z0AFn0e)B9E9f$?d`Zw(VLuD#yqZ=T_fP_1V@|6{ zV2JtmRA@U}lG%`En*MZm8bcNMJVW|eqyO@JD84(I!uCN$McewiG%w&swTTh7wE3p% zl&B}tAiv$hoX8NS+`mcT^RlP(#O(WnKzKLb|0RSN6)(dfq!LBPBt?9b!=A1rneGz4 z7$VY&3^aQ@bqdz32y$E>*UNnO?gSm9LC9z zY&^pMN$_k_3_Rl!SNQ4}4gwNjAR2Kt@83*hIU?dlUrK6Y|93<%!5+46+zPilOuF6Q zF_q8vxk2a)Npw5?EH>6}{oH}+8AMnxvqi3ju5gM9B%Su2-XF^p&rc2*Qj^25M#8QL@?7~b3Z%Bym_h!>C_8N$VPd+{<=ElElh}kT+u~B~BP@m72 zQ4m?GZ0q0kQOeq`8(uZLn@z^qYcW{w!Ms&W{~VdYNUp4~787d`k+)I0wUin>)1g`o22Zr_i-c8Ut^%!0`V(Nj zLyvh!QcErkAh-Z^!HnoQ&aC||Rqe<-rhid4OziPHRK;M=3;D;5X}Av5I+Q)qeP7rI zG;}%XeS-ovXFg{ZEDD{Ft*)^;|6w6^y&j|QY_ZJSWA+0sI>u|4-H1=R0sB&{ zNB9h*gw@h9qHSq%hW-;dTG)XLHV>B5S0@ejfS1$FH~$GEY+trJJ9Uzv#IAX5(P*I> zv*tcGj)O5wbTo+grb8EfSA5p)YcgXkS*@4PTtzYCN1B2)d8*aMhmf 
z6{CIzK^lTfk;EN(Pq2I}dxV%v*hgUg6`MK!O+9nVZzgYOis-_U?H;ryS^N!pm%<_s z^nSCJ(4|W(Ml>VBCKKlnfAT29L1@4OTC<||f-y=Xw&ls*4)5yQ({ITUhYv)MCGcdk zyd6voK%^b#`uC%$!TW1W#K2~jUtzL+_3G*8-AN7C_o8}umbP%1%>n{_M=;$f9xbqM2YtZK&j19<@vcb03PIzS z$f~)i{_y74In%ChRga&ovV(^f$m=P?yMPk7V{7Qmv2p@K0TWD)O?(G78En2F#)lYgd}E&tG*73!i*G*>?bMQ-l0I zz=_?AQ}XL~R&sKocW4@?;SWJyCgtohnhYxMy`+x*Pw=}7RrCKj+K&V1)o-TUOY_^D79)M&qqD<&r9gAIT@VlFP6K`H*%2TBPsOB(-| zetAF8?s38?=zcTud7!u0_Y6+RqfzJ?Z>%N&36n(KpQFhRfveNR{o>mz2-tQnR_|Zj zIRA~2mD;sxnl7_-5Lp|4lTVC^vXQ^DutjNljZwlKPqL{_SrxQ^UQ zW8nA1UvMA}aZ#I{C`8>pz0r+KE9CQk7f*z^`(6!EnUHD_O^zOrA3AsLfk!QSAmWJO zf^hTFLTum-Y&jvxG6HlV;a(SFyr@i{_E@waf?v9|lUNE$sGmbs!`&+B6emq|?9|%D z(FusNuGM5B&k?Qw-MBHm|5!adUjp=F^LUE;n{Zk(V%}_lZ9APk zvCfXOAUxv0c79ZDG|9}G$4|>N*^FRLH0_+XIV0Fo&|^+~Mh7{yQBJ`rM|bR5OM_gZ zsCx21P455z>>;I^Nn}9|bJ^G#_h*%hSnF^T_T;930Y0`Y-=vgAJQB3k#GlVYSLAyP z=TIZzAzztDsPdIENG^xX4i#h+W4-x+v=W~lB(Q-u<9+kVja2Zh1F~14dtRT-swb-6 z&t~tMU>lm%1%!yg^{uxy;$HMZn517grR55Tqo4qb1M8n`!ob!g9@u{8{SB2UX$}gu zY^h}EOyD>KGmi%p%BU#W!;Lp%edBdQ_3o`SNwm-Cr`U865M7b~(bDCSs0Hzg$7w_) z+LtVW#VR!nJ{!)@Gvqlwo=H=W7*T?)ByC|mS7fC?jEW(ENFC2l#tol0wgK&<1A-|N zG2=y(NiD@yhWogFB@pV+u~_R~yLyS{!6LR9_s1Y~9qUHs!^6rtoO!c*OdBSUs0SO< z0SPLpj=PVfKBlq&p!ZNElkxV!^$&3yQLVK@5Ymf`&XYW9p$k{~Fb=7!0s{0{4YZhq zYB`T*tzAx)JDKXJ?IJWNS_m(y8;41jybW2GF04vUeNR5TCoI!8a*>WsHaBn3aD>p3 z0bF`P7(UHNRX2jpL_stt(t(kP-%U;;Bz~A$HC}ZXy&P+g1k=~lMVwTh5?AL2*1l%_ z8j}R4_YB|f{Nu~IT%i_I){yo#= z9TyLOR#(ya8FrPU(`icQab^&H;Xj6h*E-kgO|p;~Z>08>S@-y>R4R!MxqZJXz;Gb% z#QX`MD35zkbyvk@es&T;&%UTVlxX`tH$xGge~TTkR0oUCw-NQ&&LF;%1ND}-c^vnBx{%@rIW|dCrRz_Evv-Ma|A=tDTeDO#*|c14YbmksYd7@6Qt4QF_QV- zxkwtZqq~^zLyT5`OK_H)0PC*UpDM7r9sF$eslm2gq*k}~xv<=BMWS_EU{HpIV7irm zL8Wrc9)1@_>qf*j2)IzFheNnorC9q-;o07{guVZj&>K>{bCSk-IX{HH*7{z@kG#bx zw-0;M2$s-NV;f*1r5;`xRxL12pc$Fxn7{bd-_N=Ww4SYOMq&rcGQV&CtPUQ&CV7xo zJ;91qO1F*3evy}~MI&&?uL|z?*2tzK;Am0mn%=wgH6pv&U^rWQKzB?>d(xcONZOT+ z{lFVFy3GTFbhDdXA0hhmwqE7d7bavxwoMyYI#%7SMTH=XrUQaly_gC4YGkyu_HKv4 zY=KPlS4985XC+_o`=?2CJzm($!&1+W4)Q5}+XTtoW5^)t4X 
zXNS3u_8+U0PW}Z`8*~?M;t|_X31Sad81yb8eL3AdehM3XHqYyTH?N>tMf;LtUSt$V z_gL*KrT%()^|bj)X+4!IQ}5^L=kL$$v?0=Y(T_na=t3{#@h7U?{ba`xoI*>_C~*d0 zD|5Ye&>kKibsJvn2qU_z%6ptUWLM7YAAMG#v9~N%PxzIsp%GN1Hu=g+4DJGO9HH&+L44J=HVx;$H!_vbVcA zGsF%>77NK0Jt=Sp#^|{@jER+bsgwAo>1qw7#`B%iKHq#fy0YB4vouSM9>g(X1vk>M&wpd!ZhNPTZ<$J^o;?gqtY^v^5Sr4 zt7j0o6R#zmH4b=NMH1pO_}-05n4k-$EP20eKJ9*i+IS+2n&9y}7@4Ads{qSzCSSvo z?`(l=!Uw=r8X?Ry-!TWH=BV6)gk8<$C#%xGb*TZi=(o5b<40ac21zc$A0f5d;) zGmy48hRasz7k*sTx}fAmC0p=|9e@Aw0xQn?T8r@oK%L?Ckp2GX8|SJW;7NxN7H44` zPkxx(^dXz=a3aM)`8yYn+5W%yEnh)wwE(K!i|zzxJ`+w{+EnWO3)ZBcqKecN zlUIA9BT82(b;5m#bLFI8eRAN}$B!mplH@JYhCP)kn8wv$jo>;zGM-(}k?RKiV1O=4 z0@b$wWhGK5yit?`OV9DzUx^GNbEtY%(v)CxGqtl-@;_xJ&M~HOPgV8m{AG}Rd|(pLNo z--AMvA$`c@DE=Dx9H_@}1t+l5wZ`vV=f&Yl(}gQ=*<2m?&cMf3XjHp1@=uow6+}S2 z`gMd6A?Ei+Do~Zgq)^hKkb+W-5j?^sZ_ys@41uZ_#~H6-Y&#rw;Uhv&mTrGcxKvOhw7K zd}iOlGkePF*T2k#MQyqt%YNHoURD%IIA*hgj-#jwKO6oicF)*>)v zG19ljL#EF!JMwaw2V7#BxnE*RqoJq$-0paI*=wD$s)`qqOVXv;&@sUr*D%i1IonDX z1mSw8wK$mNxInz7_9X#)+HNbfTzyhISYJCq;kAKLyKetS)Ao;c_a4{KZ1i>1t?@d% z8{Asm)9Q9%%h1;2Q%?+;Q+Q}4Q><&wY*-2-*lN-YV{3FJ@SFP&JV1Hj8)7MxiVXeEX^69MJw2%Y1WrKdBuIder?KiW)SH()6O?TX>Brn>Z)-?#aS%bI0VLv1q zO+7PLvk5@^4^%Rss{H&@iPGw+&*_O!a~{Lf zgK~!VAM<$EICpV@skHKU*Xw8@=a7XL&$%(zpQjFn4SxU+8rptk(7g~vUsxwH{$@$c z?MV!wSIYgTP2+`YY++O_5Mq<13CHR<6ySHX1%rB10?Kfe9}<-r*da6(hX!zQskfXqBIy!+X>R zV$vU1c{as`|Jri*3`DypPF0YWBOIf>gwt{^wwpGU;^MWQ`?W#x?9%kUTfK zoo)k$W$!f%j+J?dG$Z~QYXGl>MU8XUPnR&?OBZLC@1W1+mbRNKYL{=GMryOUjFI5k zjl!|AAfcdUr*^Hy)9v8!?4sxS9#Eh^BN`XIua^G2_Zgr5R90;?V2|lcR(XxEh{`CY zsMazHTek=tEGP^DZ^Skh>f__$k=w-YAQh zC_#j|s#11P6nppi7~xef6Aig+;OdGGh!c(S&v zGOYj&&hTD9Hm7jNY;mNs{Mpo(ew^$PYzdi?nrDGkU{&ztYpg$W3D5m!o#WAGiMG<< zviz&iJxb(!`oo}zBA_?Oz_$RX6#sN4(CVTDLmh?SSX>{@q-1wIX0-O`@zBZASS*8p z*mAN5t-@dDERI{>yUN}{!Kv>mKIXY_xUQcYJTVaoDKi)pPq3C9vLJ_>@>c%wvih zp_p4jQ;rg!6HBvOYBHUUc0}Xg(C#jsEZ{I*X-L+1vi8WAv#`k5gqQb)tNP3Ma9T*190tFHTE^Tam#5nO0sP@`eT__#;n2F>w4sK;rzz1z*^3=$AIbR4^!xfViU73C{etu~K$q 
zB4jzpW)(W_-)|;mE(Gw;(EJ;An(fXmm-54-Zu6MVkE|g(_blHLdL*O7G{{KSL5a>K z7+s0*aM>Q#>0RyLkM?OsCEpGrPRf}*@PNgI`dsS|B#khDNA6cAY~a-{){?@Fp_D?m zrl4jwB#nAhrJMihfYxP-DQfK>H`eLP`x~GWO6Pm9bM(T6v)_Xg`J6^;^D1w@Tt|7m z;4s9d;K%(B17J@-uSxue@R;IQiNV%(8eXX)d%M>{SVvQ_rw>45J(#VKY6-&_@Rd`2 z`hirPo^`)U>3L2<2>zdjn;sWV+gV(;>59ZxwLNO%EG=3@Yb`8iOI|NZoBPr$}1|=jCpwHB+(AXp8D78~d z@B=?$I<;=xw6ZM(o01 z6a}oCKS&Saj`|q_>E3RE+Gy3s)Nuuf>+ThFhTG3SZth?xru5#7ieBtfNLg%IL7i2rDvizQU#`DS7CstJI#-qrM*(IL3}4mdDC zo?Eed>5FIs$eP2*#AHCI`cerA#wf>~GX$%v1X{!}H%{KoOLvgQgn;O~#S>(p26Uah zX{2@JxL|Hkt#rpR%zJ_wwQdTAtM5ARA_+W@{fmIIE-G@*VIB-lQIk7AUf zC)7g{&IKPbn`0jYm2#=?WC|Pn?!_t@43+>W&n{u`%V>~M z#RMeG@E=e;0ohkf?ZUSyn^aDH6}->+b*Poc#ar4XFgsagC$`>x+od70=F68t(Kjj!EKB=>JxNdY zaOMuX-WCcrm77?PsW$PhlNC>gZhssi`kTA)kSWLN_=%)HRIsHV(pl^`>9kiJx(L@) zPw1ts=RG}Z=`KpD$}@5INQZUbbstbPOZ-?^&VQ)4=hECpE-omw#2gS9yHC7cT{K64 zBIp7iMz|xW0^Vo?JG>tzs)$-)_w7>Iy3UXHHuE-fPc(Hik*1s^enk|qX|i&n%!A0~H1LAeyJeN=3p!lvFVGA`q zSf8}?C`9%Hr5^!#a*V^OcD_naiOp;Yd1mwIETUWpp%C#prW?C3RTMKi#PW5?NTmNN8?HiObpyO|1g7YFhQD>)T>}H;TrC zPh6@HMMx5Pn|g2^-Mn$LZ!fqOJr<~hCaHcf9I?Y3i82TWsJc-ggRmonYiElo;M~3= z40-ngFL`1tI}(wG)4nb9-ag>1ny|5q%QO>nlyS_cfjH8T=J=StqAEKrAJD~*cZ+~B zj*m~RB#6LIU@k`=@in{R6IoL_M-(uUGeQ1cg&fF|p}jXSON`hn8F={g8yKsTp44%|qJd2J#L}1k zy{hl0%*vagzZA>IRwAzq0+B9ckxA1I$R$3;=kY2w3B|d>@8w>K&1|zTGK?y%nwvnc z044y%h~ipAWzp%#$CYyLIM-yLTs^%6@HD%HG;*^XR#Q#J|CCO+IQL6w(&^P@lN0t&K>s?G4$4?F~i_)fO2tKATy%gvQ@da(bQ;1I-lEWripsYd<)S z#TPJdv-o6YJx*uLFTj{t5P}U2YqOD}a$<-hE^>TERDhWIbg^+DpWPxMh|(@Ll2kla zc7n9f`-ISDp@g5b$)@~TWV(@r^6S3{V4{X*X&ti%{JC7PS*=3I4RFK_!D62WPcL6R z{W8qPZ$bF&B8oVK?ya$x@qvbY<1nlpambC54O4lw=84n9o|>ygK=B}T*IpGUfwM+D|sA8qF3YA)`Y;_okT`y^i z*_BG8PJN7|(_+4vh+3vU{B{^o@F!zH=YR&h@I15zb5w&U$+?JJzsOed(UfJ-Js!yx z9x2sR=32A4FiV*?k39pvM(4R#h4+60!fJEQ>HHQWqn2)yFo>T5qUn6aht+x>ql9XS zO#|3!%@>8(*IGd}T=zqG*Z;tm*KgbjRKA|hV%+U^rF7K#oLD`a8b%JlAr7&DL>TZ1 z-{Jrk85}9ie$zC?BinLpoml)Hq>+uAx1-{IglSCJL!O2!Tb>CbQR0i$ESu4!59VF% z59Jzm26IdZH@E~|`{7C8^E%rf-Dl*!qS7f6mGk2ogm 
zJ>W82;7)+*>5dw0+nQrpz?-8Cs0rgXVi#Opes#-c z9_w%WA!Q({T}>S(A-#ylYLteT%?kZE~fOEjPt=?c#Vsj-KRXoaEo2Y#0-D;_~s>TXa*$c`~G2&$`bP zLHkrlJ^J-eE&$)pG2`|D4=AEPRPn_5+!HaHm#rwNK$E0P!_ZGpJ?BZ8}U3R;Obq-U|?YiAq<=dwKq$Y!R-A&hH z=zs@8F?gq~_e5(|ylt5Bo(2gfMComfSb{i|DZCUhbfwEU$N*-v*w?%0hyi^Nz%qH! z344qHdGXh;*F48&RrM>85$EmQ2aSBryT^%Vb#CsKyXgQbL#H@5tYCpns=I}V-pr2l z`!UfK!fo!w>J4k*mwA|x@oXA;E!}Ff15QR} zt^6NB>0hnYT0g(3;QGc3WcFj5#10K|wPDvthl-Z4igT%E#C~M_u|wsJoEM#4jpb;3 zLf6TqRFD^%o5S+IX!sfwc%bywv}tRfz?ILrdbqH156B|P&jW?QjPw*DUf_1O-ywjn z@utyaNeifAIiEHynr-z4ABz~ZI@74DtMB!U6+`?Uz%M`sUAO9`hikm56HqOG($=^Lro5qdry%&H(m`T6oL#jEn(_9sE@lN z?KteOEdS9?q(?egj#j$RR+@#$C|2|0?Ami;$g0JO-rWhJ_&XA;y9iYGT(Y}$+^HO` zJ+d2Yp1`m9T!*V4l7l1-UwLqi*&mnnf#p^VlA?k5e?;oD^$x#1l?MzJ4jHlsr{ zlhsnm3KB!MA}`W=Tj`m|joPC$e!FBvch1l9Su;q6wIdYDnXg}&>W~Q{+M1rzc>mP@ zkS&gS>1u={nh=(Fh)-A(pBE^%sc(5jVvUfYC>Ral6Dl)wLVTN>rLGAv?+1OAqL2p#R1M zbFxRh=kBW|CsSpwzf+)aGmQ97>rIJ;btN1dXtRQYBIfrttG-`Nvr~SBsS-oxOJia0 zk~DbCudjYy1cnKsh+!qsSDREHWa1q1b=6vchb_0E3CVQ`lzdUIs}Mq39mT6HcSicL z{#Nh9RDb#Hc^njPx)i#FH+oZt#o^CWoA8n*Qhy}Zd9_HvvWq`?ApOK z*x2IX>pYe>n6D?vY;=rc({JuCo+!#YsG81q*XOg{#8O&mdfSBC^gfa@2YhrpT+3*h z3U7G8X~lf(=S|497}V*C)aasQ*=OW_IjRTVF1IvZz%m7v>HU`d`hthaQ+XK8m! 
zB^-hRx0rm4><51jy!&m1{o1Ge$D50=e|-4MTf1~qZd!hw#>Q?H<-k=?Cz2ewo!^kI zlHC?E%)&kyOTqE9o7D6fzFu2JQqm`C!8CX0Yp3(Sqz};M$}h2xBMQ0ko!>H!Nj-kn z9Z)w1arbea8&ka_6s%wZ1_<}g<#9uHJ36^35>U81J zlARnJm%+H)h;4`86AXQrgdYe<56S=jS!q!WH&!APTKbrJiuBef@ZKrId}IbhVH zR>Y~?&T?3+&Tf#X_s^+?i2J}BC`1&Ptx^B+{%hp7(ty|PLM90^Xx(KdB_{$#UZni4#; z>?Tr^die{ufT+YWDW`f9o1q+pWiHBe&yxpGCV{-sDYJzY6{fcmB%4p)viDLkd=Tj; z`JqVokoU)NGEuVSFTbo^P(Me{UZ$OQH@}Qo%2Sva0RJxg+`s%=pm=zMhx^v$1CTuL zSPD$-98dzQg0!yow|x_78O|T04Qjfn;Vx=wP{K8%_C#KOdUs7E3YcJ7QoH^S@}-6* ze3ZL6X8Zy=eDX(xS>IYe=>VsBb~KEUWVL}&)J@^3Atf^7cXk34Vy({087A?qxIqJG1+ zjg)kE3P?zIHxf#Vbax{SOGQmEJZ@M;FmsZ#prm@}712<#lfA!R zt99CY+w`|CuR(gPlo`ita)u+TAp>5v$896*fVg4NZ6>nA0UW}xwcwxEw8$E@Qx#*0Zevx-j5iS?CqVIwXMQg`V3->7YU9 ztUWN(lRR~YGQhziZ4x2}=SgLw=U!>E&F&QHl*3k59Z(`;-T{)K4eA5?NL3{4C5HlQ zNQ2_r;&E7iu#ub7F53eGvJTQGil*n?qf!hgT0oPI)WYcONf6<~sYQ zE~a-Hb}x`;QV)=O+E<$e06y4{3IU5@$L6@ay;7P8}@pwYi+3-^_sheT70n2~nOycQs5?EzRtpwgIzg(T6DAywY zpU-0j(B#T#NpW)?W7?g$qPhg%Q!On$h%dQswCs9p=N!8e2M(okm|oj3}9NlNF%oUjV9`~=Es5lSSN>ZCh4+Hhu& z;Lt@;az3vvbwHoKvZjJw4i9ajm2C8ffmq5oSO$+C5M@6|!&UU##H+*(#s$t|dX7tu zU+jU@jwBtS)0if``*&RQAo#Ez=`+*0F1F%X>)za&UU{vrqdbA2Zsp&S`sq3P#gVOw zI{j<(xMrP_uPt#E$^-$&YrI^|Ouzngn=rqsq$Q(_;-O=qtHZx}*BUKw`G=!r)+v1$ z6xo<|JztW;@RprDTKe0-W)kDu#P3y_F$}VF_WGaDaN2qCr1-0og7dDJAIB^4aRM(f z1a}k)zXLRRlI^HL?-p6d))?~8tWnF@mU#4dw&Jm(rQ&GQj!F`4nmS<`5fCpeiY0>$`X{mgtOiVTDbvUTcKAU+WL^NrW09D1sl5WdB zkttm~md`f%kJ+U-<+y~HF5r-;Y~^py7YfHITWw;fc3Q>97K>i+!Sj-xtX}@t>aWg! 
zGKI|{Z(P%b-f_}O8+aUuC-J3=(%7Y){K(HFNl?PCQ1!;~U!|P?)R!rQ^qDq2Pkhd< z^H2PC?u=PY4j~l_A8D%=;Pu!bng!L6*fblzv}eL3LsGE46v;~F~##Q8o5Af4l6MA zn{^~2hDb8f1oj9I1|^_JPVZm;5sl_>t=!q18#Q$z;yhvbOx=ZYPClwHAS2Z?f41>$ zSbWp5#6H78ciTdL7JhDGz07y$t40RIga0kH+`T)WSI46ZzF;Vs@H!E;48&Y(CsL^alW(rC7&SgKK1r}lzPunA03&b z?K_j4&tmFlkUAb;(7Ag&7JAS6X`ddU6s~>@R*MuR%7=zzb%c+uA9q~D3mp*H(|GVI z36{2p?%H?JT_Y3U?@bBST}?;1{rQ6X*aBh*&Rf42>oMK*U%#Mig{SO1SPuX2(IDYN zk^k)hS2abrSpc0seM~=g+HCkD8ZtYeGrBc6BOHYUuzEzHtRlAf^>&MbuX zU_@U9k+;R?kNx`Y?Y_&qomOrCjHh%AZ`i3$YovF+WxV*tPdWPI&b65HM`wn=UlR;H zIGmfB9G08IO{ty7AMkQS4rqm?9#Iy3KC{hv#pJj&SSPL@wySJz|I^MXUd%BP;dgjNQ}k8 zFZ$uD%(xtyli8Po%SSiS$gPYkG&f(|kVqQBB|O&*QDjx-M7(jQ{yO!h_1*q#w?~Lg zjyRY&pL#ihOX`ViH6REr`1w3T)Dc}xwalhL{q#NpMG&cYP5W;d2a>t-se)a}scsVX z6rUHY6H!C?+2d7x;?tYafy-cx8os4JD{MQRX+gV)GdX?U{!N!Od;R;v;pI%+IIbGQ zok#2?KLL40jieNUXm#LC3>LRw%Xd{J2a%fk?XaW$_FDDGU)zD=T+Y#mGIf9f1>ytw zemE@AZqxIT7Qrg;IM@$?IWaIEJ6o`@0K7D0YV;c*asImg@uP?B!DU`FZg}l`RcYmr zJBhCS3a~Iz7IJ#;7O`abDHQPw9R>y_F_~CgtcISAS)JnNPkoPl*+e-0!$X@!?_$~5 zHlMLsDyn%3%K;Y>3{(l3gG=YJlom**8m@iq+3Fq5&91t!%S%wRm*U!wxnCsepJOQ+|3xUt=!pCyA@#Ch6=-TKi>QpNs zDT-+Edqho;iU0-YwPC#N@D!*qZ0x}#^;Mhal|ezq#K~*QrBC6lMzl>>Yub8F#?6JL zh<2faKifvUP&(UP8Z8@%(c-<>0$E+B1*<%qQXZ)ILRihqx>XZ1SEU|Qt&l}Z&C{*@ zAIDwpB)^}$4Yj}|0h0Jpxq`ed78@*Spr35nuGy<0kH-wrGo8EqIA5y{h1N2Pegcnq z(V!O7{C`=vUZ#X(7sk6w7ogkrA0jihbrAknjVB^iwg5M5pBXY&&h_Be75}&TOVX;# z@XF0An}hbfup0u0*CA)kt3ee7sV)ACjgC=mC!Lfv9oG~ks@ahl8Dt}~Z2$me#QCvH z|2C0Kv`wW`Ot;D)B8HH8{XcxCbMtbfPL;v*P*uQLAL-9fI6Rb5@Beot9Wyi<6AH&H_C5{3Iz% zyl^;M*V%JSPt?JeLsEqwCL8`$kBIEfoKY<;Y{-iHm{NwT`{7h?W>WZNwJ7mzKjo^)7PsVxaY`dQ6E=&#n2mg&O|Cx_sKoXs+=G7^zUKHfigI7e;B>y76zyATU z)cm;1X4pXcv#=Q5hnS2{*Udbh1&7$ZLmYub7qpc#)$Bn{N$#tn$fJE6uVYpTFW(Bl zm-!;|OV+YA?6<|TP`~^g{M3p=Mzh=VF!=1%V=iU_G%%8sQw_UMOT#tgR?S|#{R#q>sNNqpS#U@?aOt42?C*$J!U75QV+eaNXe1M2 zsGEeNxM3FvxcU@_^!+$|Fsn0akW+g-G}R3FO-DV!7q8J6u|R5NRBs~s>Pe24dJl)a zNh%)ZX1%HPD!+TZ>sK&p!&0PP;~uq1go{v6zY#Jga3^45Wgk_~W7@IpY0NuGPIS{h 
z0Kr*Cv8VCIv0e_<@?R@1uoL|n5i~DpKo8k!b&_!Jc;iKK{=L-UfkxI=G?s%{Id|Pj zU#yjpy(FzjW5L@R##dM|xzp33c8^b74TwW<0HlWx)S}~9=^HR?e#7TYHN&gxcB`m_ zL0$e4hc@xYfTogV-umvDlCcQ7+qcO&*E6d4oDQW6A}LYt;((HMAaHH>_}IHeMnVt? zGFQfqmxXA=moj@bfavwf zUILfcABgFJaeQ!|hv2i@u}D_w?XaU5d_3jGVliV@gIth=anM8Kt;Z1gVS z0Z1Ws)JE12`9Ex^${W%pt z81C~?hW7yfkQ)H6Qc0=o)@xJu^-Hd>FOKZ9KCkzh1TZ}PHoRu*wg{adyj5s29Tu5R zhrKu=&Gzt0h6*SH5S{rQAnFox^T|D*)zx&7zy9RHl&>6f9cxqU7eeaOe6=Y`t9x$k zoN$<`v0v76XC)n0;>4;bP<(gk<+qiq!n8Aq4r_=-HMOW!I2;&j&C4M~|i+ zmXGG|!}L8KIc+{uV2!QGctNH68?)tCy+L7ZLmAmDRnWaS3Nx`O~VGrQ8q{;LCr63n&h zy3T8K<$mUCHAh3utcYUKWEF9~XC6{gaO>#kTMyj+H%|kBHY%^2b2XoL-Vp8BPD6Tg zZ!dQ_QJN@R41k@9r_|H?`yW9>69mw6k5SP^sS8vJ!&MY|uin^zM6=YVT!=Tj9{bUS zzkC-dkW(iPE&)K)z(#PzFjc%^k3@NE*LtE~hP{|%f;~p(B%Gi!}=MMe6ZjSsb{eUZi zpvp^rzVPiA%y{Q0=MNvysm?nf$>Uw6wYkoVP4>rdpb>57{~Cwt))bq|nN-%HH8Uk8 zML_DD@=JS&|8dJ}YB6u|Dz+?((XE4;E)x%y*Z=#GYX|ST>ALL>#~K|vAKpT2`m!S> zUKRTS-O7JK#-ZkgM<4AB5Ax8Zyi7;HlSI-{P#PcBjh;s@xx4gkaim_BUIOOlrBz3| zUfL}64wl;-yM!bSy#!9r`gMw5`Wa6z2yPQaswY0JrKnfNzJx_MI!!VKuNs+(c|Y0) z=mgu+aVKKrheq8C^ydnh#x8zoC2L*{h}m^t@*B3^1wDawy?R7;=MSii{EtMyt&94* zO#5?tRQ%3voYi)|f}Abu>fpnnhVk3-hVk~jdAr4Pqa{~W3Gu*S!}!1r!QL@{?D;y& zm%b>y=0~JoiZ}{>y=_Xk!KC)c-GyK7n+t0!jeD~P5`$<(>Uxr?FYtMP zN=fU$%r=>E6g8oqpW9^Xs>Er;gx;FB*?E8v%azwPwcnmY3nx_5ov{RcGrsr;H2q+< zRF=mOOs$d8c0(LOi<98Nh#M{r%3KW3(Ahm8;c53U$6`g30cJNjeLw~j7*f$(N95@= z*A27~Z2h{?4(Qeah zfxmV5HI|Og7@vvy?5nN2Z4+}hJ`Ehv@e3YXgt}~-R=e_Cdc7skqCYi>-r}aAPWq-y zn7pF|z25(PxR6mK65p&#W~Op;imDr4zCak`@P$Y)9ctlt$2wweqy_$D?v{XPx$R@Z zc|pZ^A!Fx61>I5;{g7Vto*L?j9kuU4>fpO?&GI?A9WCO#9cF9zT~V=aszF}YJrRb| zgSr#R;1kFKRl7%u`2+ zH!+9d#*j{xtM%|UxY9h($n^{byw>gE_OEZ}r^(asI2gnSti@j?EPDQEJqmSeKNaxF|3lYk>10+bRB=AT{wCJ|b z3^6MGl^7XbGO2qF3`MX3xw7G?H(&-CFUk9O%$C;lF_)(v$C8L@@rmtE)Bar&hZ(Gs zT}g#l3>*YnK^z3(pKgbDXY$atIA)}z)%x80S8GGTEMNs5!vXb7w?4v$)b)=_s%&imYGj~N-o z^OoR;fwZ9Fz4a=^aptKWb*t8A5jWnX;r{MAGc?vniKsxB)=~@cGyTEI$qo>NS?@CT zqE98ntE4kC1ccwZca%yYx8-a-N${Y;V0j#5l#(19)G( zj2QaXxQ%vNq>H$a3g;|OnC 
zU(FKHxHah=CCK7{)c@8%j*jBQfIKD60>DPzHuMu(_JC&1r@D0 zSQlRN3hGIw3phOG8Bxhd)4LhnhrPV0(zOd}+dnna{O$*owd5`fh6a)Jt&y4O;83sO zP07#=_aEBMHV7Avoo4#Z^HbzIrq{Uzp_ z$8Z49D3>{_46rLK1s`Xo>($JcBgG?8L9azdi(gt9DfjPZicGg(V{f-&M>=QvY^8^c zkZ`A2Qfnb_U;lOPWH`tf^0VD%PaES}u45*Tz29}YQ#sK*B}a=_Ww9UDS97hdodFv- zCp!l5*RMhVL@boRW`SFuuBICI+EK;~<*!p5LdGgB6Tbu?jQaa<+o4HGGD|Y{(CJZm z4|Q|w{5d5OkWp=rk|fM+T8p^B;Hh(>gA$pqBmFoiNLAOHJo%hFmBYpMxj|M7Gwr_M zcLyz+Rf6P3+S36U`z`C$w7k<_i$JvlF|8d9;?sZ}$Dot*2i=9qXX{m(IQ7mTSSR&& z1IGTwe!JOJK@Pl1%gLrn|M$?Ck-!Sve7)IrkI12CpI6Y}rvuY1k1lmMw;Xp8IiCV` zlh3kOdSpV-K~?i+SDTbxTqzQ-bK z*NabMvu#4ByFVenK1iFG96ba%!i90D5G*Xd;!5#*fF@P{yK3N2Air05_ST>l=0Z*h zISHNJJb4qSetog*iZtHUMUMHm9GmRO=OP$s_@K$Fl9<=)V1X6PX?m>4g zpKc^Po)P07mn$xK9f(fX!k#-@I%z=2Wa4B>AMR?9`=uVY#46*nLPMUiH;xUtsa+aoXGB_pGnD@;vM{qj9%#%4P76&spZ@^t~BK# z)Bh*;jraoJT{j_;55&9lu4ogTMI}3WJ)WG!KE_U>fu{Wk%xSN3IYzaaQBHGS^D}aWFRxWgMZVRy_q}tnLSD#C5x$(x>GHF$CYx zb*ba(yM?{zCkyqNSkpy`ZU83(oLAm%*s;=bddn-NE2BVo>8nMk1b#@JkPwlHr~`SZ zh(9{j!FErFMf>)<#*niljQG{w8Sy>kkh-OV*v|#dQInh-deg=*?gc|!j%k{yjhCNJ~@b5@mY;C@mbK`QOaD43k}A^HY8JWO!#;!sOcw;wbwgJ4Zs2snKhF<>0T2B zFWZvr$OnDf<7MJEX+uc-(XwATVjw_#P}EN^;4VB99uz&T92FgDp*qJM(dS67V5a(3 z03$|oqfR`4!iFm=3D+hPzIVoUy2e5nbpNbb=j&;NWs={CNJzpiXV)wKt9MLB%^mGr zOI9P9SMdUcFpZ#~95Y=*kMRNH%a-`7YRn!yZs2p+#%IPq?nme~P=|e4HJ$wSQVi&}*l%7QsIq;3J%wfGab*g)pW#&M7^R z;2#qplWOpq*kZ`N_c(;AIxY3l&!pVrw4{H{ygAK5v>Z0BbgmKnnh8pAw#(`AYnsZY z^Dz6Nszf#HV;-`;rIkz~uOW^Uqs+|UcWn1wg))}Wa<#iivqrf?{&stKtm?%*^AlUt z*(f#T*6J&fQ$G(l2P`$%$I1~9Tf-4;n`Ae{6-lT#zd4Z?Wtq)y@Du=ZTP#P_OpPh} z`3dv(!ljs=hi3Q`&o=*ZmGpLnR$AMo){yXBK!LQm;@66kU&M$y`>aNy=KE~zbQvOc zVHT~idn_w&7g%lfc$v}6}9+3;fk^Ww=?hS^hv7WAoV)aoc(Bn3dw24f^!uN=D03G!cW zaoBW8>Z!OHDPHPJZagr-9xg3GlCtoxcW}Ogx{yAnD9G)Gx_6J3NlYjK;rA?fiG(%4 z1t4l;cRf5M*_+AXYN$@_*7{T;oxxgNCatf{IK;}f1^&DYU77fV6iVLR+KwncA=u35 z4rJ|cT*pv z_;JE}ZH7U$s~!4Us0%}&TsekdYW$F6@~>`h*h^pr+v3?Rf>H*xV-E*+n_+!3dcro! 
zjr*s8gZ_-05^muLo?;now{JtvVmi{a06Pf80$X|1Hud3H<^~uFF$~RM?jXw|#UA$Ho4CvqLXjE!Uht^Ca z&RhfrpM$LPE;F6iG;eVhscW>9u~N9W-0|4h)Gn;Hi_;wvd3(D)+QO#sgMLB0d^V8n$+BpUz29pl{a&mrS?Gll@@ ztun5w2#=d!6Gc|NNpWX06C;?2^+o6U>VsK&+ICc8*Eadq>KMi|liFiowYF?~?|USS z#b&8M+9u(24nstB@mIZlPkyyG)4Gj2Tfa+TAVh_3m(i=1o##JY!<tW3&P|PutsKxu4eh#5rHrlI(e-*PL=RD$+x+WkJ3IWmABZfxNHbqcX>#n<& z-39ok%eF?s$o2rPG!;Mm3et|aDeJIrH20Fc-KdsoB~3;y<=Hb$J!H5(PZEm3&~&zw5jx8%_-|-z#V8U3aVFU~)+HWylWKi!Sw0s1-bxS7D#5~1QK{^3F4F5!Iu$!@6+ivQ8~a0FA!3|1xP!Q(y)lRlont=p*U zynP-T4uISOw7uhxpGo?L!9~B8m5#=Q0jOHG>(oCC7q+EgZ2mMi#+n4^>kErB$gb}t zX@W^wG@>nr_vcb`T`yMe0V9`p35p3z00IMfs|B;}FMezS?+s;~j)m4c>IJv5G!a3d z+MM{)IuV<_`uk1s)8NDJWLtJNG`SWADr6O@@!n{f=<;4X>FXo&_eVL)QcOz?<^ z2g^$m-dXlW7OGC}YQ#Q^(a-}oXx8xfW?Pl@HK%v5UP@cur_LWJ55{TyXh7~er-`#R z9C++4q7PlZ@_aQG;Xy5@%BAtTE*%utkY2_%UpSjFnNe+txGs7w0^rQb#Q`pE$Xu$iyt8jT?9fn4=TXs z^LR~Uhlfb6WM<@zA>cG#C_RYSKam(}5y2}qT?dHsh>7@g>t6L?$voY~bll!w$eG>R z%W6^*vF&0dhjgO$Y6dB(0B%0Vf-FRfgE`c`rpJQBo4UklntUzjTA1UHWk6j63&TES zkwVxMIHq(hzl-V;c9`bj)_uiG$J77uP36Wqc<;9HP4!FzK;e2ca4|=Ln&xv~{1lBn z5oXR;4lczh!(8gjKbnWkITdp_ic6YRKE+i&JI`MX+3E_F?KWtgyU2U}CkjK4NkGbU zJ%4k3v|5~KfoK_Oxy@N(nc-rt{^1?fitJ|V@_?7VyT9fd$0w1FK3C${*pE{m4msY* z;LE8F&um?F8-KPkb4~s8L66;H5m2xVSDtXPhnIqtPTYnia7qd1L3ab+rJ{jZWdRtn z^=Ye(s%&2G_gB5u^j$tsVv13h(d2!QvyzIhL*j@CCf;nwpY15r8H(R*6&DG zv(EmZ{aq@p)tN+eOjF;j5Qhf5ENEA@qWT`yz6X9ZuOs#oLbiJFW$ z6j}3M1>xcx>l!2q4!n?4r6> zybsx6;x5_9y4Voo@_KU43?k;gFiCAGaXvSWduD-TJmrgnK3!ZeF*2HO!!q})skzO* zc26oKP}rAkP5#x|uOH^@0bsLHU5{H_I&s0U?j3Qj{U48pkg$OPrFaT~k95BIl7ZuY z>?ipo8Q{VfY?IF7zNcpF27i=f?t#d|^U)j7|B4bmO0gj4XlhE&R!gC%NtG&d@|>Qj zY&DEI%rf(5bN_9O824Mx=m@EbQ5MJ(rI9pBqar}&f z0nlGN9(J`jDK=$Zlnw`%Y<3fAk!2L7>yv=)`tN65%l&ahe2E*?w;Yu50>dkPS&U!k zo%gc!Nbd#Rb%Me}!`)n&)Mf2w0zQj&Yiz}KL%YeQONz*x*bF07<=#YK6SDk0v5NSE*{DftkZPhi;)d{lbZ0Jj6*{?bp++Xc zJZcAW6|j}_Ag7d8F>@kv&dHfb>^8%sSwzsMtN-&GL3hE*7{xU_WRzuIsCMeBRu|z^ z_-%r;d8Zo%X=QGl7thZ+?`yWX%Qdgt<5*?HuV)8yS#7dC&gp{zWL3$}MVBJ{wmczq 
zS5C;_)O}u)5P?0mv%ime;@nVFXa(GLBGdWn*-Y&F5mlotG)4IppbZWrBHdlI+}H(E zp|glkA{n;`HrZ|APxnT258-{^Ski5^MHn=o<5K}PASi*P)G`#vUl6?I{HHO+cn4=m zbu7LsEU$m$UMAV%2on<{Vq>sf8{Em0&OVkHFR;{|Iv7Vus6GzC5xVMY^jVEEe2w$| z#WwG+$Cu7XbIU06A(&f+pns(uf61`-`Q<+{5f_$~`!x>nvk5-%-ip}2?W}6M-_8$OO%<50*v-FYY=1&miH0oCeoplpVN*NatZWv~0GLe81T# z0(vA?@S$IApS`o*i;|YxWvNpl%j-DYd$|pMW=kUrJGUPnpQtAZIMtm=egz$Vg3O89 zzP%#iHJsg&jQlZEWz?M8*B)gUd~D&U`cDAX$UH%{0Ne=`IUc;b@*b6Z-5J&f<35`m zhV{9BL8tD;i0esUH#=;cc_*hJHp40Hy-rk_L>&eL*A1jTJqlpS1FRGRHttb_=%Pb; zmXhI&mU1u0a~yy4_({1n6DjrOK2LlAAN1{T)Cghr{?0v7B(6p0u>c(1lI&sStRz-m zY3=^J2?r#d4Xw%rlDMIO@BlBk9*U2YKn}YI`w{>A7=(_Sv-C#ZR_;r^fB&(aTLu< zV8f!B8-H3bhTlbr=91(1Ww{C0WC8PQ@}%u|B|nAV^D9Da+$q^^Q{B!*GMeXF4D#Hr zRKPQ^$T;WoEXjP1ZKk|Ls^dxdoB?kv?oktYGbv(nw$e9&DAawE-)JNh`Edu~4R}Jq z^z^n^{S#+lJU(+38P6+P8ILmvD*iriws8UpzGGPEFohb7U4E)$TA_Ka5O$OJcnEA$Z$TU%S%B_qU_VjekpwFx%iZ* zrnQTWw$rQMNT{(v)?w=iW{+DunBEI(x={Q#NTS%6jR&{Vb;Z&E?LN{=@QpSKM1=DD z9VF*?vA@2t+L4<0Z&f3gmnV$Z+c{|#&Bp7o-9;q2%2L*lkjdb>Skjy`Kce<#Oq5Hp zpe3%0o_kwoJ92g|5=Jfe2~aC6)*d&So#k6A&wJQ4SJ5l!*s*V{@wqv;VBjYRhQ;%kP?Cx7{Vv>)>qEVPufnwIV zZSWrtWI>{e&(b5KT-e5Y+BqKOQV9c^pD^N0*xtdyQ zEXc-~uT-R(jc{hkAh`2lsoAR0pUHmJVAf^0wS+t4VcYKw_jN+d&L)UVFbnTT?Hoxi z^s#;M*aFD|FjEE+$qi?tAwK2|@8rMH+9w-qVS4R^m+2A;SU2FgsS893 zLYF~j^D$f%*_OO6#QU`PCb@*&3|5C%e%_|g8MXUtJ+Ny@!h(Yg#Q^Re*(dCxaV;m9 zGJ#)LH9biTlQV8;`M+jf-2wNjCasK}%eJHferZV-cIOvlGDFwt0Mjgy?Z?}-<|~!> zb+fz?h;HOeQ4^e5+25V}BIY$e94t!6|Zu>`{K96QgfZX1QDk1`5)l%R8s-VEBl z{lSPOrFGW<5yqRE@w#+3lJG}FLd_76{}-}ak)Ao*>UD?Jg{m=&0yR>mLZaTgCYz&q zfP=wlcjQ$MVJMyH!(6Xz#+oRe$GG@ytbcLz*ZR=Kl61vr+FrirZoC3FNVTL6Xdze&hxEM6KK+Ao@kh zyqw169~Fkb#&)x0L~#J%;!pQ)+W8Ix<1_g&_~e-At^U3Zhvk2r_ka!r+eBJE_Tq9|Otq2mK03Ew zAG7Vg=mmRr)x8!qStK^DIf-yAKH{9cmQ5k4(yWn@Gx}FX$64(|J}PozG%bI{*|){} zqWi}o#P58ylg*&%eNU6~Kd_u4|My^AJfaJ5p;NZ!;n!_r(L&247hn5vOT7jM91QAz zr|uF1V&)kh{;I@7enmuN!(I)eXp!lZJ2jPdoEi4ku**gP814XkXrvhA45rBNwUOV$`Kjy9Iq$Yby1H+(+gJ+_je zwv5=RGIAwDn$XQ?k!Vw4V*E)3v%kB|JR46tJbMYu+-FycJX=R0Xm`8l@6~9+!|zTk 
zpN3-Q^fDr8J1d^#WbSI3-oebj3l4&AUr7>Zk`u@*vh=<(gv1Y0$S<=+HBp zoUf=PM?8gQ92x#T>G>v~O{U4x(Wm9)~tL*A1@$B5?7wjF@DS(!qh`m?qY80AMM0j%ol? z;fTg718ioWLShopE(Cp%mVwa`Ikk{iP!ZU>L(k!?A5|=?j1QhPgzP(847$%6ZXiqn zQn$NE2W$aVJ(Z}s)_CRNHP2xQlutrjq@W^1#aC2nQHYyMh^5zk*XPrkDhSR6JX`1B zT8|wjkeX!kt165*Ye%Zi9NGp>Sk_q$Ep2?N2Q(zWMo(oP0`~U5?(8y_I1#gbSFmdO zU4k?JcHTPL7ZevQlnbD&Lp99E(ZuwV*3u8;_IarUx{wZ&Cn~=* z_l!k!csrTFu8J(bi(ccZps}KB9*H7PNoF`Kk0sykL$-W3C4*L}#K6>wmGaT~i%s0)$q=o5_Pd`yiy!5hzpQrp0tY$|=-jH(sDxEh_3P++UZZ2|7XxTGm+d2>6Ev;{CXny`24t!^?YIP$}-GaJ|d)@J!jamAE9OzK}&WW^1RFc zT>QKbG~RXZ+CI?HuhP9@F=s3MwTNA5Xe;Z5OKu^nL51%l(PRAi-L3It-V3^m7m{_jW_Q zdgz+89?WN{j)6M>JjeqZ3+pJ!=*qtP)*pZO4zktycHpI$dk(lsNy!1x@=oEKfU(1D zy7GChorJsZ2KO~+ z$y3>p-`W1_nSjJ-dQA-Gc=ZU%?!+;kzx$Zh*;o_J)GN={T@LE$E&EQ~palLji=$}#c9vQ{4j*3< z9Vwses{+U3dQs$vt9_g|xZRlfPkR%yi~p$%w!qW#>0dIzd!%@3DI%?sL`b~8CAZ_K zo=QFsBG5gF!;R_;y+Q5z>a!Y*{slLPe5~`5%%!V_tGeac;LqL!x^tVS{Enkvfxe@C z8*pdQ>5}AZ-?Yf_07fgO&tiU{6sX-)Cr9=E1o?=l55Ct-?WFGf_mf5_^@4J)x1G}~ zV6Aimd5|;dy>E$^kwn=|^M2P`}^X@4c1%tK(6!nuVo6 zOzwN^B?kiL6udwCEuA-&BKkfIN7BE$l)DSSZoH)X-lM{@A70CYW-5+8xii}RFiEnJ ziLTjrP9!DBNl~M2n0R*4gGbRq_Mu{%fzO`i{JUJTqOeN8^N^7yS`PAW&uu1(C{B0< z7gpl{xe)n^^-m3fW9>t~Ye}lEI?7Jx1%%+PHE}PZm!s~BGv_#{u-kCs+`~jzS>8QK ztWJEC;lfpAD#);zOrD&Vl$_UxoVV*>OYgm1WwLPKt0nqb1JT?aR(8&Uws{N*XR+8l zdQLsQ*P~OQ(_bUH5WZ3(-YidfZgqSsRZ7TtAHMORDurh zRs3(zVU%~`^jlpr=$KN{ep7mzy=Pr)7j=!|gi--7wg+8CB?G~As&w2d?el=0(IO>X z*pRx9vCoqv&dTt!)lzT73j2?=&4NmP!wW)E;fCat{FuWf6)#%8{<=kHcM8(MtNn=p zsuiuU8V;rRuF`geklQJ9uO4z?X~;$teK;1Dh^i7SpsXL)iO{*PE~$BKE(j?OJ$8>% z(#&NTm~1 ziZbIwRyO{*W_~=OUI;)_#x@g_v?yey~ zg1fs*5+npCI0W}#jWlk-3Bldn9U6CNB)Ge~1h>2Y`*hB!>Nl$Tp{v+?tvSDMjL$6l z1Y!}&$dRXl*YJ+;|9}A(QvaUzS{q{I7`N^t)YGRz9tHG?vSqo^A-Jv><7^7;n_zmd z?7(hXT2Z#PvD3S2RN*+VZ*L#XpnK~aR|UwHGH!43?AmV9abauW&!qToPnXTx-M0Z~i*?7-(s|Z- zIbSpHL!_Db#>!U$$FcJECD#@#MMMAnd}Vz^J?uZ2!8gge#|!KMzI6r?nlUb7Fjs%v+kuAwWz4T zcbUNB8xch))TCUlIYD=>I-V$(^uewX%h|i^bwS~tsP^8U)F@vnQPuVaVl5%xv4Z*s zQ6moN_ONxaR8@lCx>>RyXEh$@lBtDo 
zA#1zVGH{n{8vNnq(g}AOj{v>G@O_~Ia)v#vyUx$u$W(wRirl^#Rwa~l*abOWp>t~5c z$PFA0j!_v95s?*HZ-6^@AiLf;J$NuncR1q>R;E{YSys^?s&0o*pX`b_pJ@L*x2O`6 z)QR_K^q92fqH+ipc1cx}fufB|sKE7Z!s+XO2+Dc@23ByMSUl}h6O8FArTl)ie$qt0 z-b7H2xYm7SP@y=oi7z%sA+0b?F7W&ZdS*uBNiU{;t-1*k%%kVLeSuBpzx*h}{)~f)YszW$ zJbK{l>{V~+{7%SCkeE_9E27bkU7|_(=JH}ypYy5$8HezSh)YXJ&UgUs4@ac!fJpPS zIW502#atCd!1Ju>xAX=S!pel=E3R^E#wU`6Lp&}?=Msp ziokrNqE8oD zHO#L#sX$yrHAFf{zdC`$l2+Y=;Pnmd?qG?>-ULoCKv0;1ccMZCGQaevQbrw}UP z`43)&v5oW@pni4c<|3>zGq>axF!2^(ki#=Vp-O8S4rAk0(t{vCEFiG*v9s2zNJJO zt_~qi`K)n)oRet4VIU^ z<*UTzb99z9n@g2!Tv}U&mt0PQn92DEJ|@lgJ`n5sU9+{~B=On$$=NMu>&&xRx}G*z z&esty-JSg6Tdk6fuwP{Xt^5rbq$DIUoNd9c>^zkt?QI(x#n4bGlSsCSwUM18hyiaF zqPnB{XqZzT#o(4Z)5$a-MxmECmh%>myZ4yL2h9N3ojZQ1 z-iY8$o=XU-7Xeo0;?xg+s3hKl);1pWe_i~nWZQ!ZZ0hot;^U&o;(9vR zS^D#&Plg9EQSoaMIt!K{{KD=8F7(O<7mf*H)*y>h8qky();zo1rDuw196gviS$L5? zc;0zKTNM7m1z$l}r~^of7-eq#u$SKW67SyQrk2vSF!EVZ1v1bajv9WA`WrQRS8UJn z_VZ>9gIT8SJ`KGSAJ?|Uds??r?@e&XDAm^HE{4Uy%n+L*W_4*Ml63Ymo&Dnkg^WU7*oK>tF~1 zM7;#aN`$j&>#;D!T|+btto>@{ERD9@RAMKxo8aO@Js6Y9T zBQQ1B6~LUoR~-Kizey1N9p~F-aSw}B#aEfcysH`5;C{nVZXEmN(_}HFWy8|Cg`r5L zd^;Z+Q$dCaD8~%K9unD8Mn4yQP`?QK#0}=T#qttRbPc*d3)>aoc&>jrp+n$pGB>zC z5v!(G4I8xB>R=_uVgl74T`j;3m}7}0zDNt)97@`*wUS2Xs$$9m0LZ{zwJ>iuT?IiG zJmJ+LdJ?y^ir!ww8M(7ulZ5H*;I?X#{wXZ!uRuW;mJ^oi`aL(KLG5hR9Z!y2J3Wnl z8KSpC%)!FD$&oQa6qyKqhflQFO!q?rVc$Yw`5>BSzR6c`tRKkc%;)%@_7(+vo)+|`IgeieHm3`QEpX-8HnVh238j?_x}slLtND7-Yxuek z-0aUzo7UNUpE3pCIJysc=k@X=9(u9G`vMTrV^dR^e9@D(E;f6~Iv&My9HW8tO%$VA zp(V=Mc~)2$-5!3x7q02N2U9+2cK(6$gCg_a;vUQj${7So6TBPVlOY9g%^uQA$j(*N6_lqCYbH2PNa99LxxG`F)hAtgu8dZkwGHY-N6?u z#@s9I41P_O+JM+DewB`W`@P-7#?g3A3p^@cT5kIHcUmzZN#}R{#1&|7i~TU$B#M+T z7*cc)KSK01d8q}<4|37H{}`(_qrKX)^^4FaORhZ*Rxa`M=Gz4`eVVb&qdkerdPBS8 z_;h3CM`ep`r9(tngZ*94Kx<Y1=9s)v}V4EKh28myJqna?wkR4rc$J zT=-~*OTxUvo0FjvW5@8pjjV}bS);g!t1J1;1WvriT!Yj)Be9BhA3y3cJa>LiUT?_4cnf?zE zc`5~hQKqr2Y(&xkcthO(a!O{0@Pnz1Y^hKSGsR|cq8A@AXN(I4-)P&SHiNT_(Mhb> zZma)bPDM{}4$I{qi3}*c^f||?|;%R`5pWNB3zQpJ}ERX@5TsUSZ 
zgkzE-eng{#VxXVLRb^xpyG_|eq6#yr1|E|5yZUd)bM^1u9lYJ=fgWy>E>EswHeP-B z0Vr99(-?2X-#mHY73XtI&}(q$<=PXw@zA6s==!wia-=L~Yj*yfTKoN#gvebFjL6jv z{K52{T>a4`K+3T8ztwNe2Jeqsu9P#fl>p0;&I2jLLCoDwb{Bm9&ek++68Gja&-%_Y zq58u$%CX%{y3^(DQY^}PgZOC5zcP(YtG?uWW0Y9*(qg(7jq1{QgA4~B|5YGHRPg?g zN_J+w8ibj9G@+cMpV7EPMy(CG-$v9|0MaZ|AuG(s(;*YL*rW?F{(XE5iJZfYD)ZU1 zg{{#k(*laS1wW-XNA$v;>jj4tQhu%MkRH7^x8FGF1Hz2r?`-nq-;0_faA`#)9ZzrQ z@e>1Z)4GyiA`ZF>LNj)Mix;>tLp_<$>Usx>YE*6|61@-S?E*>E!?Z4CImHzfRndsR zJ_SRP(j>}8=<0Vex%NPlbvUYY1)w)K(dgsI=iqVeL-`PlH5e}2%jrI@9hokM&i++9 z#6M4qL`8%K3p+Zxe;QwFD^LeFxy=Q2ON&jyvqz8lnMWUb;!xOQqBSCdIDlAYK`rhS z5}uQJjaOh0a5DHcnAVd9jTozU06VL{+XsB1(YAMgT%eh0g6mA=yI4brB?4k@j0WKn z=H#jT;lMoGNpUO~CmQ=Es3yIkn1+0}gm_UNFIYH5Os1VKgpEk1nrh_Em;O0-_D`fU zuQLuXfs_0%lo(jB`UfYtzmm63-G%G8W5&5oyk($z#sz;LpJcSg{43GN z;P>>#L|i6?)ZkrpASR1xHEKb_YeJ{Vo)Sq|CujQizB`%opP3@)_;V!q^jjn;4-Wns zh@j3)8F=rr-GoqcyZ!F*84F}N=cG5TjZ_=>FroX0G@4Sw!vbjl*oJ_;FvBB${#T~` zy=D82D7+$eCRjE_ELIPCjTS>1ErhO2Z=C8xq!u)&Y(ccbR&qWpCHwiE+YG_>PEH0< z@&f`{>dl36I*qof{1#Tr_;cp6V<7FHN1EhyGIYFW+jWK(kK$=i8=@x;%t_ zugjZ#gns$UijXY24|LFa|H{R;M|3IEZ5B=N=Zo*i1Z6A-k175?k3iAkttBc!fw`{jmJ%-fZs&OEvZ9Pu<$$HmCBeqdXc2cTYc|*y_R)T6duXo{JxARF6 z5oC`9#aledGmBEC_{jos-{m_M_xP4W^zod83w~)U5l3UbU<~TUZfcK7hG_{s*Jf=A zj1kL3kIDjIma~}Nua6P`W>h{IvtJ>E_R3;Z?w6ANb?;ZU>vp1&BSE6}W-;>yE2G-Q z@joK!b^S2CJ%(F?9>#r&U|Gjs;Ugn6oQnKhlzlFE+$%0tyGZeQ$rmlnGX7lBy3=vC zp^4+HL(*=_d_RBIMviJrs|s&8wiHBTMxvJJWbLDbuR7u|M1(6uIPP2+kcz_|8+*J9 znM=GgZ)XGueyIVT^aaYE5+?WVb z?!S|(Rwyck4MnmZNj#nl=zm-zJRxH&Zs;n+j1(9DY{YV|e>|jlgmYhd{b#;Kv;6gh z*VL#+LtKK6ip#%d_q(~Frmviy78jP!IKd7c6c6cqW^#J40%r!zkJZSw*HD)pB@tWq z_wfK&u^QT{D!=bPVcusRIR4&Fyl`5MFgq&C6yNo1MBgvxcl z`=3=dZ$EFn;SwG$4%Hg%^{~p0+#uFSi}cg3xXJo!46JFuB#{3cS+7CD%3}NPeDHX4 z{7p?Q5Wr8CFh9hozh1`pZftZ1raGjsZPeHEJa@ebUI?SF_O8E9z5PH-G(!C!p9fLk z-MqN2`d?&KS!FNj<)gMOanF~Y;VwRusqO6wG}n==H`||kNVkJ4-JM8FwUd}fj?eYt zQe)f+)3BzuqmIiM!Ou2mZD3qy2sRj3MMoxP0?Rj11I92WMg+zIP&k*Jmq z&E1A~NmqDpiC2KL&R3?3m-s#q=1lzcp8f587hC#qp1jQ)hu`DJK?i-KXB%Tq^d}L= 
zjUws^Qo5JU?C1M!>p*EuzLqW-OKWayosu*>4 zrwgZK)RX#hYB|WrJeQP6vvtp+{4)#}5&`DEe^Viw{t&|GPbGCSn5fy+&nWb+q_vCK zkx`Ml?!i*6KTFgOxh9jJ_&5rM7htS=PDl>t>pyqxBM*E7v@7>B)L$4FoN*Xj$w$)( zi6+NL(Y}szHP5ljnD>0hm%)^^z7HKreIjbahO`iIh>4uI+CnjsI+V^##-p6Xxc}8d zd8O6y>AVh@fR$>BVKR>3M^Pjy=r#QIf4GhKte~iZ&uEFGSHw9>l>SQnH4EAlRi(F; zG6~F58U1uxPWeKiq|vPc4xC?P6QCZpyIgEdyu3u+IdkXMBh*zZ`+YF zg!yES6O%^fk3U+jK61I7x)Iu+dhuq0uEZdh18Tr!pPutzR-TZo^Zp7(;QZNEBM`qW^#oR`$I z<<{<*!|U%fsBAgam{DpK8cmj&N?k(bv*jZrJG4(Lju-sQ8PK3i`1&)0fki9>gJVx9 z|G<&`Z|s!MP&sv}nOMXRgA$W)af(=V41!ta%WWdi8B36`_A^0l-CDM#ll}^SL zVZ~#UjBGWdj^M0Z7R?uZ>L8z2f{)%R6=1?sjE7N1G%(A*x9gW53Zr6mMTyuikxo|8 zl2*W{z$aPm((3)huBol}2j6=9;ltn&3mpqqQUhW1Z#^A!LAp@RDz9BJSL!Oma_S^> z)#NHrsW-M#>}BNOW3m%KUnM#ml^?dSOcshzqYa_u{h4n`pepkjHxHLwh%ACQvbULZ z`eL9j7-XXO;V~w%8%zuIhl;gD1bsfPrQrHAMkLZOSyNAK?R^%f9#X39DhaVQZ?*eX6cY)fHN4PKKpOp7>A!GE4 zho~Fm1%XK_BwN4@4Nc9wH(oAz6w+NM!>xfO6a}SF2G zcI*GJStG*9zZ#eeNA_YY^F1bqxnD=j2$UhGE-j33zX*=p)yJsU+{aw#ho}X{NXqg0oFtiZlkY10?p7{AB;K8 z)~P0=Z*|(RB+033CUjV6AlI;_HVVnIMv6lw@8_^4oq(5q=+rIHdd_YfG|KFk&@FQS z+lR!P8`726hZO9k|J4DAlE3Q}lFL=Jmp*>b=G8yFU$8!vj#GWE=P)PNKPRN{nt#9up=dMLXv5HJ#~YEGb8LACnrWU zNdM3cLC-h!@_mt?mH7}j4PJ6y{+hOCZD%~^tjXr`((lsWrp-<+Kw#riU`JozP2YqF zgeW96-H2*s1qCraXCQ};ngLhZ+Fg!9Z^205`~RNEdB`#!3AcHub2_;r_EL52syjH&Bv)FPYC$P#oW85i3gh_||Ibbn^RIWB>u|y|To#Q@}hIl8vVG z!sR#9r4Fx%`Qf(PXgmk;J0^#@SEC{&ytR@S9}GRooW(rY8R}tD>?`Md(GpSM3mIm#weYWDV#M7x`X+t0sp?%;e>hPN&i9y?35%w@35OWz!JcDC)+rvI z^BN0%O~6(=lhl?e^4-RPzo4(6*YW)#M<9HQZ@M`UG9Ezu(x8Hug_P<+iVnQGwPhS%J?OS;^k-*+jjCs2-uSaAuEP4~^^IdKznOUAArH-k<{$UyG!VtYwFeXMswJl06syyOt zarfN!33qn4<&z%gJ167bI2>Lw8_h`RFaKnM*xrR(WSvJWJDi3|eq9T1F*WE)HR~l5 zBd}nVw6bSlLU~?6NTgvmc4RrAGew z$va{&?dZuGL!1}V_YquzX{$_&gHg@N{*$EBXE9l$!Pw3mh7s&(?~2Mu zS#4r|ESnxATHb%)A7fznWu)6~ci67`N%^W=A=8vdYP15b^#ZP#k4HR2!(x(L6G_c9 z`3`Wpt)K#NQZ?11pAd0=q8Ir=oh|{~MUT(Lj7pOn(;r)|Rd&&&UB5*#Xi{!yq|-_n zXbF^xFsmGX|Bkiw7(@5>?^b9_86Wwh!|x52p-qhkwq)* zqTTG+B-1pB^X$?T{#LGc8s9QSN2l@rKjEM=-=s24aD5&}u&i_FUe(0LJ} 
zwPzR{iH|>_H`*VLr-E{#gVMSLwB~ZW3o}Tszr}u$Kjhh|ObMp|bWMLn`bld{tKD`T z^W`w5zs$&giXr_4*5=jY>C(9YU!*uwU{io#XxLMe_TgYe}GGJfQ!uP8*MBZCq~I86?HHgEf9bU z0j8>WoM^+j8pftTERu#U6xt=MMznG0venJesBZ#{-z32=*&eqan>D0MJ@$@m_M_1==p)0oL+f zF3;JcH_zPa%<`?=V-@qXaX$g=QmZR@koS+$cQK!OsG?d&9nTL#x?Q;Fl>$x2Tm*L8 zK~wwPb@u+_@UU*L)>5|7q2uPQ@v)Znfbi>@@W#+uR^msLH$0Lq~;7mQTKYry3HwB3U9 zwZWg{M%6dcvxSPT{U$rz7wsGOTHjFt?ar&!(AN}n_tx@+hKNmNU4{u(uoD!+Rq3v_ z;U=HGyD>aH3wfZ;U4xmSl4W!-1P$4m4^&-Kh(k!I49Md0>Uk8LNx4Iv{AuwY5Nlb7 ze%tX~p+?C<{ZD=Z{;<5(o(R!pCMOV;^d|?v4Io=oZ7ZAgF0tYhHHUw7*k(!Rdx>1- zv;HLdyFlexcGN!vVcwBdV#3+fzYl-EoTY8k2>n0}Is|>}+vzxPh9CA;we(i6vJpje z31!vkidKo@82^1P(r>e*oN=8yoxxq3vn;8AbWpPM9S{b?&`*3?kWq07pmq|Edx#f& z`clcA&S%_YHElh56v$>7+;mGZB)I*^J8oz{D+#ThHk{VC1`mQ8Dq3$DJ~+1aI~?}YKVLS_bRv_4{7b#<{R30B!o%F(Fur-YNM( zuRBhG7}ee&{4=iq#};#np7KFgXup6X0vvD{GrATt6h_NNW!`-XhPg$MWcR;- zeTdYlbtXU#e~2f<8{PlgWzzDs@sHrdEz8B{uu?zuN3}}`E8I+LZ6%{fg4>LeEk!>c z%dNlGm&{+$#_$ni+{ht2a}(o5S04B&D;HNjCt@djEDsY;gIn1Ab|spk$3jh92Ghzg zKlTLdE*I#@3DQjQty3qg_o8BxuPr5NY_U({&)dKPOj&omROdb2?aPMSOx_})0O9smXOyhU_O%;3&& z{N)I9l_kTe+v(Eq`uYCw`o=5H60}nUSVP ztX6^^sq?nxHIc4xrx+|w<|u?2R=-Ts=esKNmMqXuuz?QNtzX#4FQ^mbE_lP#%>Ld2j(F1y4RlQalUL znEU#bCzo6lj>%fxss{J>3)YTdtD)#=P=tol#D#VVAQ6u5Cf3a~D~uA5O?CO z0g$;o{JItsaOn7fPTu%pCHZ@oKtxyb4?-r2E3E6iyb*lrYY1KfRA2Fe{WR^4$iKA5 z(w5=(!~1f#pqzkxfJ@y@L7xAL)Ec+jt_j-FZ$pjhm33>jx(a8;}WI!TZ`n7Z>>ada0KxtfuHTrF{f%qG=9|Ki#l2T$j-wI<9DrVf9) zoPh*e-8%5}WhjMaq>nX|vQ3{Jn=2f&MBco`ArH6lpvwe+g7vG-@hz}vWBloHBig5F z!;oSz6$a+*-)9*~_1h;0>6kRn9IG^^bz1dRU)JFS2=2X4l5LVq%)8w-)qSZ>t zG#jtnisBnysJ=IEs{3I$faBa`YK_osO5j_{18!TEOC6%E>mXLSNV1^x$d5-tO&Ps( z2=%D&71;}LT}P_>kMXXhTOIM@pE#>EI431bZqm%{G6F6X@3J{+JTdgR0^Jh?b#Ji}O;>}NJQ~^6f zIX`_U(rqmAI3hNSNK5tpG>{lrjmEdYtet$M5sF5QH#^;9i$9OzaK(P|t10(&rM{+U zLKt^pn*NpEfDxR6Vb=ahzaJwfz1^B?N-J8@dIx|_dI#-P=V;>tV2Q+EQlVtp6r6C3 zSE~(je@OW7Mw6m@(;uceemug7@KaM@#~2)qxoTMLxhaDE zVxO^NOV!)WRrmya#ikxbj&NQZ=UA0mX>SnY-DC|q_#ve)7>VcIc5+h|T6mGU&hW3q< 
zr2(*IoAxMM&_iY9R(SFUm6OiPQnZqCSQ`D$9qvoUlDqWIs#@Deyk4p>Y`+=9ye?5P+G7A3ftA(s=dFqgSV4+xUQ+O$TrI7o5?moho8 zCSq<_e}1g=8b{tN=Q|o%Pb|UL7RMoZ>dM5P8TLRlM(V+}#qFX@@(_ANm3DR*_?%Kz zRlv5|Z|Li>@~^iM#j4^JdVRk?#O}NXm|ON)_aI;kSin>W{4aO^OG0SLtt!AK$MU6h z&b?+kWlU~bP7>n#qp4LRQpO4-lCH^AY!xUA&_c#U&9Q!9UbMKdCVNoli};-2+pehR zQ5T|%1P_^EZT;pVNaRj(Vhm?u{+3_4iMdc~$Lw~p~^yDOG&pW z+jCp)k4CC@KjOP4+lZZJytQ1D9YO&DGJJw*{q*>>vTE)B6lD9mOp0_Qg@sIa4I`RI%p3SsUE#5-cxCT*>Q0V*SG->#fKnUyb6OT&#pilFt=ks4b zr+4HR3ZQc80@Q??HK4`<*DuHSV-9Q=KfVL*P?_Tsn9~~yY7ce)9&PeUBHfXN5cP%dYbou~wIQ7Pl zcBKTZy4y+De6bpzm+4OGp`)K4(ucU`kp|`ex&L=eq+g&h#7PiiUHlYfGPr3{!9zgt9XRH1( z?)KelcC!n0uJE_EC4FnBvkMJi=9MebyfnAmXD4yX$@eQ>=j{he4a}9zTWE-dqMrX$ zVxO~ANtnFyyxl+2)0+b7YS7Eg{7s6m+eugiTkEISy<2z_U5q0AP9N|JfVDUh!}=df z24LavAWal7W06&X`3FVbp#?E4494xGn&~zAN*;Cq%KM}XbZ*~xVw^&;=KB45s9K`Q zwc5m6qwda|n4U+Zs_*_yH$0hw4!ILryON!synKtlyl3+5eORKwd1;{-anW#C)?C(5 z+E640<`n-e8)_fi)mPIywe%4ox2dS?%9bx)|9=8u^NP|{IXZ#7`Fzgk@*AMwo_0J0 zhH&qKU$bBao<*LS)0ccNq>LaQ(y()V%9`#@UrrG6M_-qcQMK#5F-#IM`cuO=89t>( z{Jt4gi6f4#)oWEvvPQAq%b#6oA)`x;oe!uXVlW@t!5bAV_3%3ka;>-sWJ5nnENx4+ z)85QSwUKS`t2wEQK9%8%75M(t;9nzUVI{qqNF&70OJ9C6&8aw8D`grd6s8kO192Mx zx!AzefX9OH3NfCiRk{#edZ)&0^acPpo)M=%g<0S>O$t9|KeMw|avKz88=``_eEcaZ z6smzAOD219gQaTG85xCrH_&v$-Y-5OgzjrKhmwWiflFvZoYJ8`Mc18u%LpE7p-I{A z3E%)-159b}F0w}{!0Uxm}sVFH3_65U4=5JlL!*Z6nNJ=Qm(1-6_q zgq|S#ndq8RF4>V8oy6TK5Z9povez3uV$x*QN~+UpqA{i1!G22d z$AD7bcS`t-zC;4NX?+nzYA5Svy`)p?Jw^jAvTGv6fdd`q%cFc15iUB8uTB`5Db;Z2 zV<^4*kUaBJ;A(G-ft;&v{Vj#PASr@m8k!11)%)g zzYprE8CBe14W(14gr0S@3vV*AaQV&cN=DnJixur)_O7#P`;wp9fR#`NG!A3}W%e8g&N z>*5j;%vSnaTF~O0(a~aQN2YOR=)Q9Hs^F76NvW2#Ex>G33iFLRPwKOrf8Pt^ve_v* zmYM&u@wnN#0XgjWSQb53ca+*-G{$+1G^7{*2pkI}OnMA$#;nBWM3=DF6AWJ;)ys7dDouZjQXiM zcft+~Tmq5sFuUx60FympvHEF0lfQqz-jmk(Oqz5XYjla7)6Udy7QubRjlm4#t4@Au zY+Klot;E!a-?U|gfgVHT4wM0BX1G$A1H6rrD7+U~Fx>1F3@mMVuL5|9Zy^`l=6;-m zR^28YRAqLq?9L+IhI6jEx6M&d=S4e57FTJF`9x^2sJsZEwv9QBouOcU{azbs}0XJG|UP*1ny~yo)^>_U#h-EQ$3F@T`(oHzM)S 
zdYX7}0VWww7vR$>e`|etBHW``YjtIwtI!RNjm3F*dbAU{=z?#4JSuZKEJ(_{O<^>k zW%1qmMbE?I`Xm3jTTSmdbhM7*e-|0Hm=T<>_+2XW>IpXWMic4wkIHrYo=+<_?R*s; z+>r)cp4|?wMo+87;h3#KCYflf1f_|dy{=~us!)pxK~ zmc<%5%7Xd=)&Z-L7{FvW8qOG0%j#rQbozrQ_ugW4O8 zx%BQgx;2B>wNY9~1B9yEvjS(|@VoA>MlmDbu-Op?bZscK+pL9DH6weM4t{C3{e;*K zTH8;S-~U3RUa=w0{vb@C5xvVWiY;yB(;Q&jW74DPDurj=5{V8B)M!IK6)qk;+0Q|w)<`=G&L$)x`BI)|hs+aZ0`A;0RsDe$;6~oYP263Rs4O@B z;#i(>hs1{oL0zBi#CQ`q{)!eU{_aX-M0KBhfgX&9Ms_c+RWpRkwvQ4i{q4gtO^AD9 z3%!h9J0Dr#et%A9La(;J(<_-eAna=3N8=3+>fm}631|;aHC!0y*VAaG;Qd%r%=n*d zOgp$2368tUiGYuRg^%WkCNV&o(&OGNiULqZk*MLMF}_v)enPM9<#3u( z!2o%`&ji0ZVSwLcA=h)JpsV5Ki%IzTob~oqrzg7%r{0?}C-6ksqqh8_PU|Vr}vukdD%V@rgPW#)a zE6-#5S5aDp|9pWoYPnIqxAAl`OrjYqUw<%#d3wMBY!z6A+-30v=c|>3c`R3vFlD2b z8>H*7RIAkJT@LRQi$%-HUD-wI~D=A5;E&v$p|_91W|f~~};?kz-oPUTmn+ku?|kjcu=e^i8dn?>hT zgv^6bHHv6PbYfZ-6E?g&i}#}TWL#TH#k(hcKuZ344l4Vum!>F$p}x`BDRY;Jj%EB; zCd_gL>C-4BD9#rz! zUOMSE#v-umK{?0GUGD<>aYFf$?3#4iL-DR!C`4<5OG1NkAP9<3LC50yPSP1p))C65 zbYMMZ2mHQR<^kI{W51w!Lr|)dxN{KpHA)dQ!N;7|eHJ+KC??&B6PA3tS`Qsc`J^pG z$-EamI5kW~^W*-#qSroZr0=P5B?bnO|P3X7gvFbSt z!PDR899n2k-!aZx&926ihJ+0xpLwr$bWnbgzNTSWhpg;jBkq81C{mte0pe^hRfjM< z$Ln&8Z+{MaTaBG_3f%hYa@oaj=9s|0VJzF#?^rSXTD>Z5q6h%b1fEx$}1@%!37*lNG7s~}P}-~3UJ z+b?Qf**&I{y-dlT^?uK06dB`XGMXbACkksQPskxn1K}K__hl@J&X+S|c25vnrz^SZ zCi_O@)uN7?robZ-byu)R$Z#M<>A|mNr6}4YxQp&XLr78LwYR)mv$d};5=NTPA3*?l zWca0%4~5TfA;dfJSUtKyv@Fw!h#VKrhwdhtVl83>Q)6h1ontdA!+s|WO~e3Gyxn+O z&UUp1Q`r5qZ|$Y6=51S)W^v@T11wzQH33Wu8VHx&ZTlN3b*nv&>HcVpq_SOwK^#ZL z)ekw`TH3{o6q9NIGmG>$e1c^o?ak_G>`*()@}Jk~P9$DvqEnp^VUGzUl2Nja=M3|x zXPG(AOL)_#sT|;Nvc_G7NiuI(!>2&Vudv4bYyyz)LYoi+2 zbc9l$Nh^u!;ctqI652T&O}6rL^)2JP)ThXL{5H~auvx#rx~|Zr8nbA-X2r0viI`}A zn7M$ABQ!CFby*kOBpQ}xNR8s%On@#2!WVCm{DUjDTeQyS&*)fB70Mby^bB` zOON5Z_w<*<;VxiJ$wQuwE_I?J;}_me(3WD7E1Za?KSQ+k(gptmu*c9M-!d}1g7!7l zlRRtq7~e(gj|~(XOJ6)sf8EGzvzNcKY0dc@KOk^iUbk~TI^8>kzKcE2rr*k-tzT{A z|LjEPtNoVtr}O31fecPqSeX1jJVPwCG>1_a%$T@h(#2tB(iS&9P2Cy5Y``HSD_z$K z8gu#he=K0nx__vAVD`qI7k!WK)u$wBo;OV+sMG9BvdV@(b)8+! 
zdZoVqAQdtZE8NIv1-Wkyuu1T4ZSCYa%VI8qZ+@%tznr+aqf&o;z3VjIH)rS4niyCj z+^)9Y1%|VvG36TZu}M6Lh>X*l-^t-DjwnFfM7b-t!D#f7S^kyKH-*Vv^oFvoq^R{v`)knKyhK6Ef{ikL zlfeX#AL^aKGYGXdKjLL!+*qW6n8@Ia8Q|m8^El23$*x_ z%JtR1Mr-fyPJV(9!TV7_N7+$d0tA6nv?&S%U9^e@jV_*Hj36wyf)B%J{`^Fh1hRQk zjCVgK$#f$tS-nk!9OY7;RZ{eUk2AYB$3L@j1^piz zmF38A1|xTY2;omZ_2<(X8er>6dQCfRLFF*1Ir$Y6WXrspg@+t`rwf1p&19PH_-YUl zL$n!Xn#1q>#nj za+T(JRw;ukMPNxfIe%Rp8uxD{*plY^8HR)Q~idvTp#CcAT<1`=~_M#*#9Prl@Oaa@i)ac=j@b2|J} zL&>`0!XxU4xnZU+&ReXDRyNV0I^NBw(q#gF5@)>TMQBhHqemP@-yz)_t-~P!vbgE+ zdr@xrpW5q#RW1BQ$oGA5`4WC?u>)NK@vV#Q_Jx`PIN>Hull4k6iq{{)(G)Ae6+cg0 z02=kBc{lS_Gq3~L%U;m+OC6CqS^XBAr<%3-rBmfz!m=895Zk_&vuCSUc;=fTL$T5C z$1RlC=NqTlCPPwwPW`HNUuV^fDuh3LFgNfz0bJ;bI)g4`X?m^x`!BW9cT3e!WR1hb zgbLX1J+h{bV@Tn+qVrmavsjqL=O+{aEQtNOA^CkuV{?OKdPwf;KR-A!z(>=2FVO43 z{pW5oAVVX=vThylEy~1KD+XiI5T{Y|mJZ5gcf0sgKH!Z`*>-o*)d6l(_qrWwpiDEv zoHrVsi&|)nS_(RfB3JpY3Fn@J{D2j@_rc8zJx-n>BPo<3&WYG!)c53?73aji_c(a^ z_a0+o6Q6*&=;Bp#e!aGF)5~}Z+llLrxVyQPeu|c}p4ZC^(ZndE25k7UgofdJ8rb7{ zL?U22mxoQFkXAnGE~%~f$+FE|iNspUB*dc7txS6mIiezVyHFJU4A1Obw)ybO$sNpHtdik40c9iddZs(fSuP#tiLKYAs z#g?{#H7x_ftL$q3NbjHHMMqk`IdLBFu-W}+{se@AIrwhujBJ~f=&PKr!{PjD;I{B- zG8F6u>4P53-#uR>(Kwnv+0L)?6+CIC?Y%QF1=DsYv|&Dp!H;;1trb1LIQa-Tc+cTC z=G|Y2`?{ILG=!>`^TwM&@jx)Ej%Zmgrk97SPeO5~O?;(*q3h^UT5 zd|I`t3_$;ntF!)!`fJ;^baxF6B1kiIgMf%iif3B@LcwJRq%^+ofs)S$%7s3=kWpbYY7ysmFYssX|Bs^9>*wd^Pn#3^KLp6NYH$PV zi+GC@qeCBcVHkus*R;W|SM|}_iRwl0)z;L^qvj8ALR0?OS9l^cC1k9V7G?eGL>6VNPOfa%>4H*p;ZBN0(QI zf++t(TI?`zfQL@jE%%h3#Xd|HPzy9KlLUPhTwpPHoadDVR<}>7{Y51;PsMW8 zxOw<0`R`}5$YCEvOMpH-$Cc=r%5nvEo1{$nw+aKn4_DkTb7}V7G%duYD@0Kx#)

nL~cI`e#- z!EK{>4|V;*0@V;ogqOPpdv-WlX~>n!*0=O?v8dgDpX^}4_kE5=v)(4^;dd{fgHWF< znYc-_o(OFM=t@hqvOyYUj*J10Ev}8#GNOF@(=>`LE?s4}E8z!IVL5bc%dd{?4bi506N;sh`9&zlJ=KPTOw*& z89Yc$pVVUGg)p=)wqo0VHTeU^sMk@I?mZt}C@p#?UBob1FeBEax;6#l8n-!0F}##v z8heGqffj88dc!XI85Yu|M5JETq@8X0vyQvoII4OXHQ0>8Mt_Y%%?ewzdbb92vFDmY zc(TZWkYG?P39QQX9-J?S)7QL{7hB~Wc^vbeRYoNYFR`8F^wG%Cs+;*uNKEJ8A0n_{ zCw62;6F!$xBovmHH(_s@5$+M5o!*EjV-aR_IvMigXIfCg{=vtX%s>~%_*khPWho{Z zg{h_?hiT`=!Ees_;rs0RCY|%@Ks!+aLAGWURv<{*6VOuLr&(6fwZ>_jWsDU5@}+}D z;%FAti?*GF1eO&9*-+VLPd=MX`Pr|>KjW|Woy)Bbp&Ru$?R?4u1C@%>QXywpWs-O2 ziEn$5KV*g!%&8?KSkG9JCB2ca?a&Vqa`)TBw#`;a+W>U!6@NM14=BQEE^I zdN3bT-ONsL_oV)SSqvMWmE+-sO)g%{ZdY-iPT1ko9MvTGAhhUZ;*v1}B^CUgD$TrN8L#PyTiq^Z^N(&+yW@Cp> zHdehHLm*2cxh|ltr1fOOluE6Y738K%iB!pj?2;7xGZXULzTLDeB9?#r)whbiyGX2( z!~(V{p1WFO2i}hQ!#=?Ya(HKQ9_oxvm{hNqaJfQ5oS1B~wHq~G@55GaQ`Q-J zi23zvk`XKv(`KcapvBb|cDk}u1xapd_qal>S@}}t5h7??{y|nQR@%G+>djuPNun9lt`cRR9m*`{Ku4>_&r$!@O)^$L3O*& zYV(|~AkfD4>7T#bA`VF;O<~+l;OKMR#P2+J9g!>bPw8v6mognP8}@ui6}8FeIgAwV z6_AV*K$7BqG+si56UMT3Cwe-sTD=6kcBnV3MCc-IOa#QeNgOd{)}DI1vuou6m7b-q zQ|b^ZhDidd1{+IGnpvX_dhWr`lyt*K2reX`rM_WBn_IK;bcquS9!S|Qudm*A)bSp0 z=zmqir|!4g5bxu4iQ|=aa!0m2u*som>vgX+=2}9ad)Z#3L`(|f4nxk{gw0tGe++)TbYdD^oUXK7jLYEVYS)+xJpx65^JR-% z^%9N7$J3S_`{M?FG5#wMQ8=xnuNcTmcS3Oje!nj1 z8cSG1iqyVyo7;eybBJ9SnCgU8$xK zg;p;+0Pk*SoCq}vi<5+j#2^;PZeGZhq<50srpb|CjunB#=x^A$OGt)6G>@QOg6d%- z<9l_vQuXBZ6qR~~(}tcKj660mCNjrVL5*(sZ`tGYCKpX+fR;^u<;^Ig>4lNxQ8A!y z6Qkbuu8=pg6f*_5i_H+Fjzh5tfUWu&LpMMiJt5qwNvonTRJgbY6{IZaC@zx&wrE(`^w ztbN9e{rS6{DWZu*zF0+n8`Y-+Zud3BULeMALJUM=39U@^Oa3UKY}l?6ChT#^CbA(? 
zV&MCQ98b=B4*G?9noHnxkapD&I!|K?AlNHOC{Htdr7i%BT@i3e>b$b8BI`bI#Tle}+3Zi@A(1ml}-Dy4*6N*>aoxi0PNoRw-0b~JMD@dOI5E$XjObBMZ497XMO7VNX1GWTVQ=Bg$~Te{ei0Z)40_o0ekE#+U?EO$M12HzV6 zhg*L(l`+QAR=h}&Y{y?1^(vPOzxB9E1_C~xSKAnoBA(~!a3u}yFUbL5X~g=+?1^Z0>l#~?V-6y%n!zz=wiaBhkKEwhAT&3a@~gna=U6MG z=g=vdBKltuffgD6XTDx1JiPdepIwn(81{u|q7wQYf5hMG*IJ_Xj_Ugh76flxRI%|L z*AA#rMOeHohd%PcxNSF#-+vgEbsDxQjYto`2C!<~iW*w2`t_+L1@u4^9;g{GK;FAK z{um~L9Pk*N^KF-4M(T{I98808!e=ZiA^!eGfQW)B}C+n;qEz~x)%Uz2y%CVWj-lu zj%UEU2atPzI{EVJ_0_0C!86NN(VN;1Z$?N5o+^?!avguy0?8$pl$QBa3Pg)E5@S)1 ztl<~$?Ki_R=EqbXdo{6y+$0xRee_nnV0MaI0ds601fo?BsyqWfkjbh$x*Dov_BsMUFjJ zqrOoq_lV04C$qfqbcPq-F*(^{GT`05(f@Xd!~uUbWx0X$pYZof8w#k5;NJkl0RJSN zk*LQR$2bf&MFJ~Ae74d$|I}3(%Tgu455Up}C!+32{=#*hK^wjBoUJdAu_eAcFJZAx zzI3ivLuZ)dwSad~>gqlkY9BBuf;ze|o&vq=o>;-G1}H&)jGPa~3y|V&fg;#&R6x+8 z#I5WuxsYZ8ilf<@E5sVt^gC?$8e{8J!=K&u60VjF(qg}!RxLG5qf3r9soALtjbTFn zYB(&`H|x>vmpu${Oo{sjy+#_XX5#nughs?~e1ON$N&`{WuP@s|8HeCeh>*$`)#wo& zfe3-$)qjZ@d+glBaJ92hss%EOikLeK6ldn!eQKH`1Z`Ih{V$TNfaG3CT2$h1)~|L& zu`VaF?Rl?Il-@31h84#f6<%EdU0k5ae~EdyL5E)uB=ic@g5+OC9i4VyO7=pDntJbS_k7IQHKc4N#if8*7$J;tY6#=d$X zv#L{Q_|?yz$VN_(lAWIRk=&Zx?VR;p6H52yl^-n5tFyioaA1sMZfnf>pfnG%&` zaw*E*d!*h%cY6OC>qXNBX(ucK^QQr6Ld1>j()vxiAg~L|>TR5WRoPsKl`0zqoh;R_ zFWGn0!H2HTw9!4Mrl$NZmtXc?o@n`O7m0@_#DFl~aE34ufD2@1Vd;!xklV%^^~(k? 
z{a=7_$++#)PBQAN$+G+7nqGDG7yWX^C4@)F`Ar83h$stfwnTGmI|SbLy4K>ye)eO_ zkpTc=FmpTMF{(TfP$o-iwiB{~yRTY_V8bz0t}Y`s8Glw$kW)r{$^)2_AgTwd1K6@>YyYb6$wpBew?I*J zHluq|qbWHzUy<2L+F<2#%b!Wojf>DO_i5s?uF%(8AD9d|-?r{*l|6`=OCk^WYT3N2 zMKlTXAdQB7j26Z_oPCqeNL5cYQEeY_A31X&|4t6ft`oXuzG^#39`5!QNWnBufN&RyR=9w9!Ab1(N@08gR7l~` z9OQtwM5swTo8Pw?bbsMa_X@PPUHSp|QO3#aLNO}pH()yRNp0P7_)1nbtumcHq)`lW zS7X^rI^9ex;>`bTy~9}m(pcPPjgGw~WVy0&qEj0-8_wv%R#q%@OpP4bey7iQmi8+9 zENol|ll;>Z*#5}`bnk@MdaEVZI^RK^)!4yHbKr-Ax;%%2x4MGiwl+)Qw!BFAZ2E#A zJol0X7@bF>8_Sg0mK_w4oWDc3zwwFSMIB}ac_wEBu^kx%a_TK@MvL0_wmqK_uS9v# zHln%Rv>-JsI}&;RJN+r3*OEh^2jlpb>$v1 z6pR;{c=08ArMVsjGU7)iq?jcQXEghkAvEs>`O!+uqt;ZNK*hxJ}LOdEAQ; zPyo7&`WCIs?q+_TQMVci96BbxdT3QCf2gQid3sE1!a~1d7RwSjGyV<$N76V3XTF+f+ zp}A2hsYUiio^3CN2>QWO8&RPY5bg(ryOs3pmTG1Q1f z05MP2LU5+QYi3u1-AVEcFf|IsRO$GXhJ^8;6WN{<+wHctQ2|B`4l|&nGdnHDqF2^@sETwz>T#$I+sp%FIZfA2Mq*4IL^(8CHEI&D-XW!88;`))quX#}98BqFf zRQ&7PpQG*CzNPI8tm2~gJT=UGIlg0YC>a0K+KL_AFiJI!3CCeP0E9R1X>*sv1nViUxTbehs}M?@tlol=MM7Kog? z67-+N3nw)_1a1_?$WzX`n?4DY?$*phRUqPMuNEru+ed4r`RV508B>CYu&wvkl_iso z>KPV1$-Gg?(*t@6Y_Fnaz0a~2a805Ebfy}{u7RP$ZTb-^2mm6IIsc+aeH<3sf4*#W z-m_b5Mj74wvfa|!KjM~Cl#i@aZ`-Ls&{^51mg7-N9c?3&A?(m}7Zi$^kVI;{)yqm0 z?kQh~X!5tYK$;$6rcTZ(m@w4L89cQ0)e=~QPn~?wmkI!o@|%c(hIlZgi0v3AY2(DF zi^G4U0JR4zgHuiUh)1kWM@H{#p-p`)3ZeHyz|mIWX7lE<%FV;2f#CT(>DYp8>W%ji zkBSo<(nNzE>~pJiG>#T2`6qqe-}4y(DPRYqOHPsw{`H3cUu~({?gJycdB5|Lc4@RR z*5NE-FQy;IJlok)!wm=BaWX%r@XMdbFzc8{0rq;BD&@OD{OL!iH;n1!Txkn`7cSL& zRR*5oY2QKv=y^dV3|p)>qTx8+0_Rgd91`Bua^ly^j$36~P`d0u2mr=%PHJB}#MiJC zdUGf7ti+E$AJ`;T!2{N&UIs!DqL(r1i+e)EVh}N#smL+G54LjA3*~`^l3-kCQX*16 z`Xw&+6}uoGN6zim(fG(6wNu`we+13pw?n#Vl~c|{w9oE%xF<^?F>bb<5}|<=oHEb6 z!Kv*E@SOH$9nEFGHF5XH5vde`@9Y{MIAxVS@Cev4|8naABz7+7lJN635$BgotiBFF zC5-|woa%cN1y_Or?5{q2H~Yu@yw<(`)jD8gnqBVwKS%dV9V7lYfk<6Z{8q~NU6Rfy z_AWd#fIHWtL<25>{(7mliW8MMaI!GFe1#Uq|MEp^`(^2}0u_k0b;d{>+Q$R%$8k!* zw4*{SIYobA>!c>Q{LTw0XsHLTDr1!EC_9IfH|hw9G%}jSarrOurW<5EH(Txnzc-J1 z&g!BCiQQ(R4*b>=MDzG(p!icWYuCpCqf!wwJ1@F5~fQ= 
zHd(ST6CEAV;K~zHb*<80K8QLfy?~r_<3kTh9O~xatkB;bZHy^kFNM!V#337J!!KsP zODHt8f0G`P(f$1#AJoCOUJIR1R@UB4@Ui@8Q^G}bnVuu`WprXfKJ3gXKfllD2TCOQ zuQaHTs29J0=T>Fhc#@rgNF-JgGq{{3dN!jcU0#!geN~IFBxin9y;W+rY|vIX+@qsN zI=?9*6BPv1TSCIdwch))?;Y?60I?)9q%>PY1rMiWdI4ilub+b;s-zG9s!u8Js%Ji% zKGir*%<3Eh-pT?l-tq*l+sY)O+u9-_-pU*f677Bf{MNNcuc2NlJ1c6U?U~VX?O3eV z(m^2YKXbOOFX^G)>%Nin_0Uyu$G(+@&2lgMzLndxpQ5KNI1Q^4Xl)PwzJzhhC<TFA9<33oPHNVt3~eTXRSI0n02zV5!@@?~G7@juJ;;8_ zuYUMtGsjLIx&4Qh&SL@J<$1RvHM;9MLN$Y^z|Po<+Ka!Ij~a$bDmph3@)i<@&`pkY zoyO*qRqEK3B_=P$-D#om^xkxQq}7<1M!p7WH6PtVU|KxyS$@% z{6yN0HNf7$=1qu_+|v`rk1S!#Tq4PL{gqNF|8C+z*OM}L`}e*e_~0|Jy2;bxgR6HB z$GZ^n>t~rI?TT^^0}FnI`|_b^6*5PH+qt1;Ev_|-1NlFr#_qhPwxv3#ZfV{fj9jsM z*@gE%Fd`4Y8iwpky3Ns3q@9en=Ty`{;vYgxS`m}ZbSCrg23+3XgSR#ZpU8YlUL=Nx z9bc)*Ic%>h&2Y2D>~T-D`{<}@YyTL$ls;b2FY}O0Ex8+t5B5P0>oxjIcOJoTc*`>i?nnrdlX7%9LHG4hD!$Oi#BZ>5H?#1h^4&d`hM zrk%WSxkAUQb+=i#&y~4^wj>Lu@mNG4@@fSJ`PR2Oi(*=j%MvO4LGaBLDG`X42jJ}g zV@F66DTzt7n79~#9JoJ0tR5`9c2}5}ckWCL)(E63o10?-?aacPek&?zBbt1d&#%;4 z1TL-82I74Z3Xe81m0Evp=OKf8f-bdwbK3F55*0(=>ffz zg6hAmWLO5-!c{c%Bg6mlBqWf)PFz^1M!A^Bb$jnoa7T<3&~3t!HG~vYhB-3(Ux&cH ztl>reDc|AbWaa1vvWBH@#zSJsgKp8)vrTRF%+@PM&Bbb(Yb~cW)RO1d4LcPTbKWj@ zO7k5qdbefP)$I}t;G{7=m@?64ijS>*HZg>M0VDWZT;AatM(v7HSnL&3M(sbN0OJnq zbN-z9k8=fCg4L2pyS^n7V>*`s##p38-jf6h>+*msZ10(l&D+f+ zh_cn3y(N{Z6%qCB?RXt#5UYaoOHDF08_IkX&F0?X*XLB0F}OGp52ZZ60sv}0qxyly z2VCO|M?pQLV7TsT3?hV<{pMEr9)Z`)u%e0S`NFrp47|8}hSZBZvHWdP_un_U#R)G& zy0*wnh|4BAOfemZw#RtChP2YkNiGD*c)4wEW$IMA0R_56-H=R5wn`TnjMJ7qC0+dr z8VW(XA4i_oP?H$3V7#!_2oL(HI8X;-`P21$n@`X%#^bh7Y11C>`Q4j<3pYP>!zayq zh8H>b1zZ$MuNN8i>C-0%Au5XB52Em~KmVsEZ&<8{ju}J69a~nulwuSyMx55n31*r$ zXyE*ofzthYW^p{_EQ0Rdc?Tfzmp`8@SrqnSLqS{nxQ zb+^?iq^=+^8ihvN!Aj)UT3#CY9K z)Bf_4jkBdE>7w5C+gi)Jx6>W}sQ8+zY0BFR;t@ARPn%HOR_4eW)_i#Fn%V@*JMKkl zPiDz+mTG&{PegN9>e|M49w;Z^i$~+NGGu%Nem4a|2n&JW8Q<&&+@3{r zkx0oeZjMgf;L>kmpen4@?!O`SuN^m*K7KRJakRQqG3`?hmRsa7|M9)tXm>s7`e;z= zM<77bRnBBwT5u>BBjMh@P#{(b?rzQMIv<=Ob_b4I=)=CFR-wOpttIv>M+32J)UONf 
zHv(5|G5As4R)L$9EGZqPxF5pUJ97r&Ktck$FBWGUImeCVwkL7j(I%qNTPfN}rd2&W zOI0T3=Gyhx+&aUNE1P!wZY8Oy7tXf<=CR*~O>@m$?^fT~!>!BpTt>fZ7c8gWp1!Ou zm<+4dTF;g1wS2bovnbyIbjb6=rje=H{sU~kt9V{xf5Iq`3yOl-R}2FAqWmnKq-n$!@b>XsJggSUSD6C+lpA#+_diy0n|1x zLR))xLm_(`@$^2Gw-=)0!Uv3v(oDG`jx?<7Q0sTxotzt7QdK{oT8t))PUG4NW~e26 zdtiEp0))fh9JcHqC{)!*{Zg&@MLQ^|Re zO+Sf9bLs53s~O1XJ$9(xbENpsOs#fTJ93$9=qMZQTH_fnvfUcZ9^n~Jgee=3WN?4f z<;M-dz*5O#~S zH&6RJf)mG$C!2U&l7E!)N*h7}{A06kH4J;&@mTCgfPQ(Z0fipnNQ}rBfXygf>6Fs+ z2?N=6V;ycgUsXo*=sKa^O6i+uRJt=Zv zAGmWBcZCs{I;54^Q*ZisMdTcQ)~8twJRgywJh(2Gi7_eXxaTmuMq-d7(F3&S8et<6 zNI_2^0YoKifcFtf>*|KS(y%k4X}mj@cL2Lct7_mRGazn&{+*U3d*C- z^Znp%P7D6!*Ir8-MelrUNxr4&&DU1x1zbGflwn5dEKy9%Px=?keo&2X4#0>k77+=h zt~=zp*=2I&A+AHa&O+q^*T--Z*8$Vqbc|SxNS(!Who_=}rISq23^dPdKshPz<*PdD zTVc8;=wH>L;3a^~1gej@r>pvLG%IoDZ7NQ~5LvgI)3s@WTv#8GqB8T1W;J&`{>b@? zM!XV7&NP~k#?RBHA)~`!k)DcaKap{z0y(|<6a{+OYOA$O4T4Cnp&XFyHMQdj*d)0e zkgfcY`1twVcPYa_;I80?)ayA+B`7Zv#i61wcNQn^l#Tk{xAaH?eb5#u3j_i?4)&cF44N3^cWTj&>c8j!g^3IJ*6r#9$6TVFkLYIk>oG>r@1I} zOa}}@+UfPfJ0ggbk65d2NI|Wmr(1y+*~`g)xsycY;2aMnYNhMSPvNnx8F)s z`w8L_65B5YuM9J^S0RZuFPbid>!mMMdRdDOM$}X~ExZm6;gr+Y;7Jqb2jU+k%kjfO zTRbmM(*D*jrr%E|BF|nv0UVmMEO9Rtz>a)=?x2tYmg+fqhmr6}{ zx&SD+V{5$q+Va-a68=?${AiFtMW>w%Eik67)F z(h8(RO=moj3s^(*1;$gF-1sZNF!IOFC39^-W6|KoU|S5!O>Rw~!|w8DQkZ?ty6n#o zzm$w>0}HznK$_+>EjFQQ2rddv3(h13C&3*LPjNI;tAXs}e$VDYyI zTRybWA9UapZFnvuM5IG)3o5#|CGL?q0=TwHfC?XX4}n=5E_ETsbul z^fXr3IMZI~ZGKL~=w|O%l&}l$`-BtV4*rVJV_jFL`eR-TQQ3}?(e4~;CD{3@B_ivaqX<30}yNrfU)gX2(EIDa! z^vg+&yf-E&pQn;@*%`EwkYjjgiW6F`tM%eoo(HvY_4r0 z9DA*sd(T#&!5`8MHfSv}IL|h}-^D;idz0t%Ly+(QOZ^4cZ=-u5A!CbNo{-!^oHqhF z^FsbU{zB+8UzIB3a3T+QG`MfUD_!y==)A}-C@}3p0*OZ!lek$UFKn(6-?{Bl6~keb zDdgV711Tqc@Ss}qK+2S2E3tMQ*7p6V)Km2VdiEoIk}@a)#a=vQ!GyLeqw3aVmGagI zE{aEIxfze1{SrR?jE}-YnmB1r4O2>f-=+3>poVPX)9|06L2U)CV^ytTpE&&)lHdVQ z*k4ErAju5653kW2fEc1XSp8Yf7OR-1LyF`z)jQ48%`#v#a<^j^5b)yyOGGlzovW3>|4Cc#tS5HC&w>}o>0aJ#(wuN%pB-r zHD_2DRv=gk+|@O-K~s+ETPx>v=iIW|;3t=3K{i9D@e>0}@anV9X34~}#(