diff --git a/.gitignore b/.gitignore index 2e61c87..4cc71dd 100644 --- a/.gitignore +++ b/.gitignore @@ -106,3 +106,6 @@ venv.bak/ *.pyc exp/ backup_exp/ + +# MacOS +.DS_Store \ No newline at end of file diff --git a/README_bay.md b/README_bay.md new file mode 100644 index 0000000..390ffe2 --- /dev/null +++ b/README_bay.md @@ -0,0 +1,91 @@ +# Bayesian-Crowd-Counting (ICCV 2019 oral) +[Arxiv](https://arxiv.org/abs/1908.03684) | [CVF](http://openaccess.thecvf.com/content_ICCV_2019/papers/Ma_Bayesian_Loss_for_Crowd_Count_Estimation_With_Point_Supervision_ICCV_2019_paper.pdf) +### Official Implement of ICCV 2019 oral paper "Bayesian Loss for Crowd Count Estimation with Point Supervision" + +## Visualization +### Bayesian + +![](imgs/bayesian.png) + +### Bayesian+ + +![](imgs/bayesian+.png) + +### Density + +![](imgs/density.png) + +## Citation +If you use this code for your research, please cite our paper: + +``` +@inproceedings{ma2019bayesian, + title={Bayesian loss for crowd count estimation with point supervision}, + author={Ma, Zhiheng and Wei, Xing and Hong, Xiaopeng and Gong, Yihong}, + booktitle={Proceedings of the IEEE International Conference on Computer Vision}, + pages={6142--6151}, + year={2019} +} +``` + +## Code + +### Install dependencies + +torch >= 1.0 torchvision opencv numpy scipy, all the dependencies can be easily installed by pip or conda + +This code was tested with python 3.6 + +### Train and Test + +1、 Dowload Dataset UCF-QNRF [Link](https://www.crcv.ucf.edu/data/ucf-qnrf/) + +2、 Pre-Process Data (resize image and split train/validation) + +``` +python preprocess_dataset.py --origin_dir --data_dir +``` + +3、 Train model (validate on single GTX Titan X) + +``` +python train.py --data_dir --save_dir +``` + +4、 Test Model +``` +python test.py --data_dir --save_dir +``` +The result is slightly influenced by the random seed, but fixing the random seed (have to set cuda_benchmark to False) will make training time extrodinary long, so sometimes you 
can get a slightly worse result than the reported result, but most of the time you can get a better result than the reported one. If you find this code useful, please give us a star and cite our paper, have fun. + +5、 Training on ShanghaiTech Dataset + +Change dataloader to crowd_sh.py + +For ShanghaiTech A, you should set learning rate to 1e-6, and bg_ratio to 0.1 + +### Pretrain Weight +#### UCF-QNRF + +Baidu Yun [Link](https://pan.baidu.com/s/1Evxxu1skHni3Iv3VxdcZvA) extract code: x9wc + +Google Drive [Link](https://drive.google.com/file/d/1i22E7_zigkSm7nBnqMaEv00MD3CPhIDk/view?usp=sharing) + +#### ShanghaiTech A + +Baidu Yun [Link](https://pan.baidu.com/s/1GlaxGzFI8qFCHbqu56qSRw) extract code: tx0m + +Google Drive [Link](https://drive.google.com/file/d/13bEdshBY-brUvLSwTCOqDlK5QKcZIAAH/view?usp=sharing) + +#### ShanghaiTech B + +Baidu Yun [Link](https://pan.baidu.com/s/1YYg-a-sdhBAHZRJzZOU-6Q) extract code: a15u + +Google Drive [Link](https://drive.google.com/file/d/1woK-bI_JyeY9wZL2pXsWgPzQqhD8Qy0u/view?usp=sharing) + +### License + +GNU GENERAL PUBLIC LICENSE +Version 3, 29 June 2007 +Copyright © 2007 Free Software Foundation, Inc. 
+ diff --git a/bayesian/train.txt b/bayesian/train.txt new file mode 100644 index 0000000..152dee1 --- /dev/null +++ b/bayesian/train.txt @@ -0,0 +1,1081 @@ +img_0526.jpg +img_0639.jpg +img_0826.jpg +img_0415.jpg +img_0720.jpg +img_0123.jpg +img_0529.jpg +img_1071.jpg +img_0501.jpg +img_0804.jpg +img_0873.jpg +img_0601.jpg +img_0177.jpg +img_0173.jpg +img_0675.jpg +img_1001.jpg +img_0096.jpg +img_1139.jpg +img_0001.jpg +img_0084.jpg +img_0395.jpg +img_0166.jpg +img_0368.jpg +img_0093.jpg +img_0004.jpg +img_0572.jpg +img_0956.jpg +img_0721.jpg +img_0120.jpg +img_0554.jpg +img_0308.jpg +img_0131.jpg +img_0992.jpg +img_0156.jpg +img_0532.jpg +img_0476.jpg +img_0427.jpg +img_1162.jpg +img_0660.jpg +img_0538.jpg +img_0298.jpg +img_0306.jpg +img_1173.jpg +img_1157.jpg +img_0777.jpg +img_0859.jpg +img_0537.jpg +img_0236.jpg +img_0986.jpg +img_0370.jpg +img_0491.jpg +img_1150.jpg +img_0719.jpg +img_1083.jpg +img_0107.jpg +img_1029.jpg +img_0927.jpg +img_0893.jpg +img_0286.jpg +img_1135.jpg +img_0640.jpg +img_0530.jpg +img_1115.jpg +img_0533.jpg +img_0105.jpg +img_0945.jpg +img_1035.jpg +img_0484.jpg +img_1168.jpg +img_0760.jpg +img_0939.jpg +img_0907.jpg +img_0401.jpg +img_0429.jpg +img_0828.jpg +img_1167.jpg +img_0144.jpg +img_0553.jpg +img_0421.jpg +img_0560.jpg +img_0743.jpg +img_0817.jpg +img_0657.jpg +img_0106.jpg +img_0079.jpg +img_0473.jpg +img_0865.jpg +img_0730.jpg +img_0989.jpg +img_0243.jpg +img_0182.jpg +img_0252.jpg +img_0812.jpg +img_0508.jpg +img_0744.jpg +img_0439.jpg +img_0181.jpg +img_0965.jpg +img_0487.jpg +img_0710.jpg +img_1054.jpg +img_0947.jpg +img_0321.jpg +img_0758.jpg +img_0014.jpg +img_0504.jpg +img_0674.jpg +img_0991.jpg +img_0358.jpg +img_1138.jpg +img_0019.jpg +img_0677.jpg +img_0336.jpg +img_0070.jpg +img_0766.jpg +img_0612.jpg +img_1109.jpg +img_0840.jpg +img_0616.jpg +img_0926.jpg +img_0376.jpg +img_0761.jpg +img_0020.jpg +img_0795.jpg +img_0046.jpg +img_0459.jpg +img_0267.jpg +img_0428.jpg +img_1122.jpg +img_0247.jpg +img_1143.jpg 
+img_0290.jpg +img_0524.jpg +img_0275.jpg +img_1120.jpg +img_0115.jpg +img_0698.jpg +img_0092.jpg +img_0922.jpg +img_1052.jpg +img_0297.jpg +img_0112.jpg +img_0180.jpg +img_0520.jpg +img_0351.jpg +img_0478.jpg +img_0588.jpg +img_0109.jpg +img_0738.jpg +img_0592.jpg +img_0752.jpg +img_1028.jpg +img_1164.jpg +img_0450.jpg +img_0168.jpg +img_1108.jpg +img_0799.jpg +img_0649.jpg +img_0272.jpg +img_0902.jpg +img_0874.jpg +img_0870.jpg +img_0821.jpg +img_0153.jpg +img_0426.jpg +img_0949.jpg +img_0527.jpg +img_1198.jpg +img_0443.jpg +img_0063.jpg +img_0013.jpg +img_0564.jpg +img_0040.jpg +img_0764.jpg +img_0411.jpg +img_0118.jpg +img_1172.jpg +img_0196.jpg +img_0879.jpg +img_0985.jpg +img_0437.jpg +img_0918.jpg +img_0493.jpg +img_0271.jpg +img_0860.jpg +img_0059.jpg +img_0645.jpg +img_1126.jpg +img_0911.jpg +img_1082.jpg +img_0383.jpg +img_0422.jpg +img_0139.jpg +img_1192.jpg +img_0904.jpg +img_0503.jpg +img_0512.jpg +img_0541.jpg +img_0330.jpg +img_0348.jpg +img_0425.jpg +img_0673.jpg +img_0210.jpg +img_0950.jpg +img_0151.jpg +img_0792.jpg +img_0469.jpg +img_0661.jpg +img_0003.jpg +img_0089.jpg +img_0312.jpg +img_0555.jpg +img_0215.jpg +img_0023.jpg +img_1129.jpg +img_0249.jpg +img_0451.jpg +img_1032.jpg +img_0689.jpg +img_1189.jpg +img_0391.jpg +img_0146.jpg +img_0653.jpg +img_0248.jpg +img_0695.jpg +img_0402.jpg +img_0075.jpg +img_1018.jpg +img_1020.jpg +img_0163.jpg +img_0440.jpg +img_0756.jpg +img_0253.jpg +img_0712.jpg +img_0962.jpg +img_0471.jpg +img_0842.jpg +img_0525.jpg +img_1176.jpg +img_1021.jpg +img_0127.jpg +img_0295.jpg +img_1045.jpg +img_1088.jpg +img_1090.jpg +img_0622.jpg +img_0650.jpg +img_0518.jpg +img_0854.jpg +img_0262.jpg +img_0323.jpg +img_0522.jpg +img_0933.jpg +img_0951.jpg +img_0366.jpg +img_0325.jpg +img_1034.jpg +img_0827.jpg +img_0194.jpg +img_0636.jpg +img_0051.jpg +img_0683.jpg +img_0558.jpg +img_0309.jpg +img_0345.jpg +img_0438.jpg +img_1091.jpg +img_0577.jpg +img_0500.jpg +img_0279.jpg +img_1145.jpg +img_0886.jpg +img_1161.jpg 
+img_0617.jpg +img_0726.jpg +img_0620.jpg +img_0444.jpg +img_1118.jpg +img_0506.jpg +img_0164.jpg +img_0507.jpg +img_0614.jpg +img_0769.jpg +img_1131.jpg +img_0185.jpg +img_0694.jpg +img_1055.jpg +img_0754.jpg +img_0569.jpg +img_0317.jpg +img_0228.jpg +img_0492.jpg +img_1190.jpg +img_0566.jpg +img_0921.jpg +img_0818.jpg +img_0204.jpg +img_0974.jpg +img_0866.jpg +img_1039.jpg +img_0101.jpg +img_0169.jpg +img_0375.jpg +img_0334.jpg +img_1078.jpg +img_0061.jpg +img_0113.jpg +img_0981.jpg +img_0080.jpg +img_0324.jpg +img_0316.jpg +img_0643.jpg +img_0408.jpg +img_0890.jpg +img_0363.jpg +img_0765.jpg +img_0822.jpg +img_0430.jpg +img_0245.jpg +img_0671.jpg +img_0486.jpg +img_1201.jpg +img_0129.jpg +img_1142.jpg +img_0843.jpg +img_1133.jpg +img_0238.jpg +img_0955.jpg +img_1017.jpg +img_0858.jpg +img_1154.jpg +img_0559.jpg +img_0002.jpg +img_0407.jpg +img_1146.jpg +img_1086.jpg +img_0495.jpg +img_0857.jpg +img_0133.jpg +img_0121.jpg +img_0973.jpg +img_0830.jpg +img_0165.jpg +img_0278.jpg +img_1012.jpg +img_0393.jpg +img_0202.jpg +img_0700.jpg +img_0313.jpg +img_0024.jpg +img_0055.jpg +img_0979.jpg +img_0162.jpg +img_0135.jpg +img_0098.jpg +img_0727.jpg +img_0969.jpg +img_1137.jpg +img_0932.jpg +img_1102.jpg +img_0301.jpg +img_0047.jpg +img_0595.jpg +img_0805.jpg +img_0801.jpg +img_1151.jpg +img_0387.jpg +img_0999.jpg +img_0136.jpg +img_1037.jpg +img_1087.jpg +img_1186.jpg +img_0032.jpg +img_0195.jpg +img_0360.jpg +img_0276.jpg +img_0642.jpg +img_0913.jpg +img_0231.jpg +img_0670.jpg +img_1123.jpg +img_0517.jpg +img_0707.jpg +img_0088.jpg +img_0594.jpg +img_0838.jpg +img_0848.jpg +img_0354.jpg +img_0936.jpg +img_0876.jpg +img_1081.jpg +img_0322.jpg +img_0637.jpg +img_0739.jpg +img_0917.jpg +img_0244.jpg +img_0591.jpg +img_0628.jpg +img_0964.jpg +img_0691.jpg +img_0609.jpg +img_0342.jpg +img_1097.jpg +img_1077.jpg +img_0502.jpg +img_0423.jpg +img_0561.jpg +img_1059.jpg +img_0568.jpg +img_0920.jpg +img_0389.jpg +img_0940.jpg +img_0787.jpg +img_0634.jpg +img_0516.jpg 
+img_0900.jpg +img_0463.jpg +img_0942.jpg +img_0796.jpg +img_0835.jpg +img_0789.jpg +img_0184.jpg +img_0397.jpg +img_1195.jpg +img_1089.jpg +img_0319.jpg +img_0328.jpg +img_0724.jpg +img_0852.jpg +img_0662.jpg +img_0225.jpg +img_0479.jpg +img_0266.jpg +img_0499.jpg +img_0134.jpg +img_1023.jpg +img_1064.jpg +img_0400.jpg +img_0226.jpg +img_0015.jpg +img_0203.jpg +img_0548.jpg +img_1084.jpg +img_0970.jpg +img_0718.jpg +img_0138.jpg +img_0095.jpg +img_0831.jpg +img_0482.jpg +img_1000.jpg +img_0234.jpg +img_0183.jpg +img_0687.jpg +img_0923.jpg +img_0197.jpg +img_1016.jpg +img_1100.jpg +img_0034.jpg +img_0587.jpg +img_0229.jpg +img_1178.jpg +img_0124.jpg +img_0424.jpg +img_0496.jpg +img_0179.jpg +img_1110.jpg +img_0998.jpg +img_0742.jpg +img_0578.jpg +img_0207.jpg +img_0305.jpg +img_0373.jpg +img_0971.jpg +img_0292.jpg +img_0861.jpg +img_0621.jpg +img_0414.jpg +img_1140.jpg +img_0737.jpg +img_0176.jpg +img_1057.jpg +img_1095.jpg +img_0667.jpg +img_0755.jpg +img_0318.jpg +img_0170.jpg +img_0418.jpg +img_0178.jpg +img_1200.jpg +img_0021.jpg +img_0652.jpg +img_0327.jpg +img_0627.jpg +img_1051.jpg +img_0837.jpg +img_0352.jpg +img_0029.jpg +img_0833.jpg +img_0952.jpg +img_0488.jpg +img_0474.jpg +img_0702.jpg +img_0819.jpg +img_1188.jpg +img_0261.jpg +img_0685.jpg +img_1024.jpg +img_0008.jpg +img_0734.jpg +img_0509.jpg +img_0888.jpg +img_0676.jpg +img_0404.jpg +img_1046.jpg +img_1127.jpg +img_1008.jpg +img_0161.jpg +img_0699.jpg +img_0085.jpg +img_0703.jpg +img_0083.jpg +img_0934.jpg +img_0626.jpg +img_1170.jpg +img_1065.jpg +img_0664.jpg +img_0883.jpg +img_0655.jpg +img_0263.jpg +img_1005.jpg +img_1061.jpg +img_0333.jpg +img_0881.jpg +img_1041.jpg +img_0540.jpg +img_1185.jpg +img_0953.jpg +img_0586.jpg +img_1011.jpg +img_0846.jpg +img_0149.jpg +img_1075.jpg +img_0894.jpg +img_0759.jpg +img_1177.jpg +img_0258.jpg +img_0171.jpg +img_0740.jpg +img_0006.jpg +img_0353.jpg +img_0615.jpg +img_0810.jpg +img_0142.jpg +img_0958.jpg +img_0584.jpg +img_0390.jpg +img_0585.jpg 
+img_0365.jpg +img_0026.jpg +img_0458.jpg +img_0143.jpg +img_0575.jpg +img_1027.jpg +img_1183.jpg +img_0535.jpg +img_0891.jpg +img_1085.jpg +img_0757.jpg +img_0549.jpg +img_0436.jpg +img_0815.jpg +img_0635.jpg +img_0954.jpg +img_0367.jpg +img_0064.jpg +img_0410.jpg +img_0277.jpg +img_1111.jpg +img_1025.jpg +img_0434.jpg +img_1175.jpg +img_1171.jpg +img_0610.jpg +img_0618.jpg +img_0208.jpg +img_0281.jpg +img_0058.jpg +img_0851.jpg +img_0300.jpg +img_0017.jpg +img_0110.jpg +img_0265.jpg +img_0362.jpg +img_1038.jpg +img_0580.jpg +img_1096.jpg +img_0972.jpg +img_0666.jpg +img_0090.jpg +img_1007.jpg +img_0982.jpg +img_0287.jpg +img_0714.jpg +img_0218.jpg +img_0832.jpg +img_0145.jpg +img_0072.jpg +img_0222.jpg +img_0137.jpg +img_0741.jpg +img_0028.jpg +img_0413.jpg +img_0232.jpg +img_0573.jpg +img_0849.jpg +img_0855.jpg +img_0770.jpg +img_0283.jpg +img_0914.jpg +img_0611.jpg +img_1047.jpg +img_0596.jpg +img_0706.jpg +img_0847.jpg +img_0868.jpg +img_0193.jpg +img_0780.jpg +img_0100.jpg +img_0786.jpg +img_0337.jpg +img_0728.jpg +img_0656.jpg +img_0602.jpg +img_1015.jpg +img_0273.jpg +img_0797.jpg +img_0398.jpg +img_0693.jpg +img_0944.jpg +img_0593.jpg +img_0768.jpg +img_0995.jpg +img_1125.jpg +img_0078.jpg +img_0543.jpg +img_0167.jpg +img_0420.jpg +img_0264.jpg +img_0016.jpg +img_0599.jpg +img_0417.jpg +img_0448.jpg +img_0748.jpg +img_0311.jpg +img_0071.jpg +img_0749.jpg +img_0941.jpg +img_0237.jpg +img_0214.jpg +img_1149.jpg +img_0241.jpg +img_0461.jpg +img_0018.jpg +img_0356.jpg +img_0483.jpg +img_0099.jpg +img_0130.jpg +img_0372.jpg +img_0800.jpg +img_0654.jpg +img_0544.jpg +img_1099.jpg +img_1068.jpg +img_0326.jpg +img_0374.jpg +img_0074.jpg +img_0938.jpg +img_0117.jpg +img_0456.jpg +img_0901.jpg +img_0713.jpg +img_0788.jpg +img_0665.jpg +img_0294.jpg +img_0841.jpg +img_0269.jpg +img_0579.jpg +img_1098.jpg +img_0466.jpg +img_0480.jpg +img_0709.jpg +img_0672.jpg +img_1010.jpg +img_0314.jpg +img_0043.jpg +img_0349.jpg +img_0172.jpg +img_1187.jpg +img_0371.jpg 
+img_0320.jpg +img_1103.jpg +img_1159.jpg +img_0629.jpg +img_0399.jpg +img_0663.jpg +img_0335.jpg +img_1148.jpg +img_0108.jpg +img_0254.jpg +img_0432.jpg +img_0915.jpg +img_0624.jpg +img_0997.jpg +img_0711.jpg +img_0704.jpg +img_1147.jpg +img_0036.jpg +img_0519.jpg +img_0680.jpg +img_0498.jpg +img_0651.jpg +img_0230.jpg +img_0198.jpg +img_0905.jpg +img_0751.jpg +img_0928.jpg +img_0630.jpg +img_0140.jpg +img_0644.jpg +img_0776.jpg +img_0057.jpg +img_0361.jpg +img_0209.jpg +img_0158.jpg +img_1160.jpg +img_1169.jpg +img_0735.jpg +img_0551.jpg +img_0681.jpg +img_0515.jpg +img_0077.jpg +img_0968.jpg +img_0240.jpg +img_1166.jpg +img_0937.jpg +img_0877.jpg +img_0513.jpg +img_0528.jpg +img_0150.jpg +img_1165.jpg +img_0200.jpg +img_0246.jpg +img_0869.jpg +img_0011.jpg +img_0160.jpg +img_0464.jpg +img_0285.jpg +img_0132.jpg +img_0701.jpg +img_0082.jpg +img_1182.jpg +img_0030.jpg +img_0126.jpg +img_0632.jpg +img_0731.jpg +img_0875.jpg +img_0978.jpg +img_0717.jpg +img_0460.jpg +img_1044.jpg +img_1194.jpg +img_0910.jpg +img_0049.jpg +img_0331.jpg +img_0213.jpg +img_0885.jpg +img_0468.jpg +img_0419.jpg +img_1158.jpg +img_0022.jpg +img_0174.jpg +img_0747.jpg +img_1006.jpg +img_0381.jpg +img_1036.jpg +img_0863.jpg +img_0994.jpg +img_0783.jpg +img_0346.jpg +img_0233.jpg +img_0820.jpg +img_1107.jpg +img_1193.jpg +img_0943.jpg +img_1191.jpg +img_0005.jpg +img_0087.jpg +img_0039.jpg +img_0813.jpg +img_0239.jpg +img_0206.jpg +img_0256.jpg +img_1070.jpg +img_0409.jpg +img_0377.jpg +img_0446.jpg +img_0216.jpg +img_0189.jpg +img_0785.jpg +img_0041.jpg +img_0598.jpg +img_0310.jpg +img_0307.jpg +img_1093.jpg +img_0465.jpg +img_0746.jpg +img_0380.jpg +img_0732.jpg +img_0781.jpg +img_0906.jpg +img_0619.jpg +img_0604.jpg +img_0983.jpg +img_0753.jpg +img_0211.jpg +img_0552.jpg +img_0892.jpg +img_0767.jpg +img_1180.jpg +img_1069.jpg +img_0154.jpg +img_0899.jpg +img_0343.jpg +img_0025.jpg +img_1196.jpg +img_0155.jpg +img_0433.jpg +img_0597.jpg +img_0570.jpg +img_0867.jpg +img_0223.jpg 
+img_0581.jpg +img_0186.jpg +img_0122.jpg +img_1134.jpg +img_0340.jpg +img_0957.jpg +img_0364.jpg +img_0069.jpg +img_1114.jpg +img_0646.jpg +img_0679.jpg +img_0623.jpg +img_0392.jpg +img_0814.jpg +img_0589.jpg +img_0299.jpg +img_0931.jpg +img_0836.jpg +img_0963.jpg +img_0094.jpg +img_0987.jpg +img_0930.jpg +img_0976.jpg +img_0924.jpg +img_0384.jpg +img_0035.jpg +img_0076.jpg +img_1101.jpg +img_0405.jpg +img_0350.jpg +img_0147.jpg +img_0659.jpg +img_1013.jpg +img_0948.jpg +img_0066.jpg +img_1132.jpg +img_0829.jpg +img_0690.jpg +img_1060.jpg +img_0457.jpg +img_0897.jpg +img_0825.jpg +img_1163.jpg +img_0803.jpg +img_0563.jpg +img_0574.jpg +img_0175.jpg +img_1112.jpg +img_0668.jpg +img_0045.jpg +img_0259.jpg +img_0341.jpg +img_1067.jpg +img_1040.jpg +img_1106.jpg +img_0205.jpg +img_0296.jpg +img_0255.jpg +img_1152.jpg +img_0772.jpg +img_0613.jpg +img_1121.jpg +img_0834.jpg +img_0406.jpg +img_0762.jpg +img_0442.jpg +img_0192.jpg +img_0044.jpg +img_0774.jpg +img_0606.jpg +img_0359.jpg +img_0467.jpg +img_0779.jpg +img_0060.jpg +img_1074.jpg +img_0494.jpg +img_1153.jpg +img_0102.jpg +img_0582.jpg +img_0386.jpg +img_0212.jpg +img_0625.jpg +img_0844.jpg +img_0872.jpg +img_1105.jpg +img_0396.jpg +img_1119.jpg +img_0052.jpg +img_0454.jpg +img_1179.jpg +img_0862.jpg +img_0481.jpg +img_1026.jpg +img_0511.jpg +img_0912.jpg +img_1124.jpg +img_0148.jpg +img_0960.jpg +img_0523.jpg +img_0531.jpg +img_0729.jpg +img_0571.jpg +img_0908.jpg +img_0889.jpg +img_0188.jpg +img_0037.jpg +img_0716.jpg +img_1014.jpg +img_0394.jpg +img_1056.jpg +img_0462.jpg +img_0850.jpg +img_0784.jpg +img_1002.jpg +img_0763.jpg +img_0159.jpg +img_0009.jpg +img_0708.jpg +img_1050.jpg +img_0678.jpg +img_0648.jpg +img_0010.jpg +img_1031.jpg +img_0445.jpg +img_0355.jpg +img_1117.jpg +img_0378.jpg +img_0550.jpg +img_0217.jpg +img_0260.jpg +img_0816.jpg +img_0996.jpg +img_0081.jpg +img_0878.jpg +img_0199.jpg +img_0431.jpg +img_1144.jpg +img_0688.jpg +img_0745.jpg +img_0686.jpg +img_1042.jpg +img_0187.jpg 
+img_1066.jpg +img_0682.jpg +img_0048.jpg +img_0896.jpg +img_0608.jpg +img_1003.jpg +img_1156.jpg +img_0723.jpg +img_0692.jpg +img_0220.jpg +img_0993.jpg +img_1197.jpg +img_0447.jpg +img_0369.jpg +img_0056.jpg +img_0807.jpg +img_0315.jpg +img_0567.jpg +img_0452.jpg +img_1128.jpg +img_0647.jpg +img_0242.jpg +img_0201.jpg +img_0497.jpg +img_0031.jpg +img_0771.jpg +img_0547.jpg +img_0705.jpg +img_0725.jpg +img_1058.jpg +img_0053.jpg +img_1043.jpg +img_0722.jpg +img_0435.jpg +img_0284.jpg +img_0583.jpg +img_0882.jpg +img_0111.jpg +img_0959.jpg +img_1076.jpg +img_0880.jpg +img_0224.jpg +img_0977.jpg +img_0270.jpg +img_0793.jpg +img_0603.jpg +img_1116.jpg +img_0304.jpg +img_0884.jpg +img_1136.jpg +img_0235.jpg +img_0412.jpg +img_0980.jpg +img_0988.jpg +img_0773.jpg +img_1174.jpg +img_0562.jpg +img_0871.jpg +img_0798.jpg +img_0453.jpg +img_0696.jpg +img_0104.jpg +img_0607.jpg +img_0669.jpg +img_0293.jpg +img_1141.jpg +img_0329.jpg +img_0534.jpg +img_1113.jpg +img_0288.jpg +img_0961.jpg +img_0388.jpg +img_0073.jpg +img_0141.jpg +img_0935.jpg +img_1062.jpg +img_0227.jpg +img_0895.jpg +img_0449.jpg +img_0565.jpg +img_1009.jpg +img_0282.jpg +img_0806.jpg +img_1033.jpg +img_0332.jpg +img_0903.jpg +img_0475.jpg +img_0050.jpg +img_0455.jpg +img_0845.jpg +img_0946.jpg +img_0490.jpg +img_0274.jpg +img_0909.jpg +img_0966.jpg +img_0219.jpg +img_0898.jpg +img_0403.jpg diff --git a/bayesian/val.txt b/bayesian/val.txt new file mode 100644 index 0000000..fa004ff --- /dev/null +++ b/bayesian/val.txt @@ -0,0 +1,120 @@ +img_0042.jpg +img_0697.jpg +img_0012.jpg +img_0062.jpg +img_0990.jpg +img_1048.jpg +img_0576.jpg +img_0802.jpg +img_0116.jpg +img_0119.jpg +img_0967.jpg +img_0054.jpg +img_0782.jpg +img_0514.jpg +img_0929.jpg +img_0809.jpg +img_0033.jpg +img_0125.jpg +img_0633.jpg +img_0038.jpg +img_0775.jpg +img_0600.jpg +img_0157.jpg +img_0824.jpg +img_0103.jpg +img_0984.jpg +img_0250.jpg +img_0505.jpg +img_0631.jpg +img_0556.jpg +img_1049.jpg +img_1181.jpg +img_0097.jpg +img_0536.jpg 
+img_1104.jpg +img_0733.jpg +img_1130.jpg +img_0808.jpg +img_0086.jpg +img_0302.jpg +img_0114.jpg +img_0470.jpg +img_0715.jpg +img_0641.jpg +img_0557.jpg +img_0510.jpg +img_0152.jpg +img_0485.jpg +img_0190.jpg +img_0065.jpg +img_0839.jpg +img_0068.jpg +img_0864.jpg +img_0477.jpg +img_0441.jpg +img_0546.jpg +img_0091.jpg +img_0853.jpg +img_0975.jpg +img_0357.jpg +img_1004.jpg +img_0794.jpg +img_0750.jpg +img_0791.jpg +img_0605.jpg +img_0590.jpg +img_0489.jpg +img_0191.jpg +img_0007.jpg +img_0778.jpg +img_0658.jpg +img_0289.jpg +img_0925.jpg +img_1184.jpg +img_0521.jpg +img_0291.jpg +img_0823.jpg +img_0382.jpg +img_0416.jpg +img_0736.jpg +img_0268.jpg +img_0128.jpg +img_0280.jpg +img_1022.jpg +img_0545.jpg +img_0257.jpg +img_0251.jpg +img_0684.jpg +img_1092.jpg +img_0638.jpg +img_1079.jpg +img_0790.jpg +img_0811.jpg +img_0303.jpg +img_0542.jpg +img_1019.jpg +img_0472.jpg +img_0027.jpg +img_0539.jpg +img_0856.jpg +img_1094.jpg +img_1030.jpg +img_1063.jpg +img_0887.jpg +img_0067.jpg +img_0379.jpg +img_0919.jpg +img_1155.jpg +img_0221.jpg +img_1053.jpg +img_0916.jpg +img_1072.jpg +img_0347.jpg +img_1199.jpg +img_1080.jpg +img_0385.jpg +img_0344.jpg +img_1073.jpg +img_0339.jpg +img_0338.jpg diff --git a/config.py b/config.py index 2da73fd..c7c512d 100644 --- a/config.py +++ b/config.py @@ -25,7 +25,7 @@ __C.PRE_GCC = False # use the pretrained model on GCC dataset __C.PRE_GCC_MODEL = 'path to model' # path to model -__C.RESUME = False # contine training +__C.RESUME = False # continue training __C.RESUME_PATH = './exp/04-25_09-19_SHHB_VGG_1e-05/latest_state.pth' # __C.GPU_ID = [0,1] # sigle gpu: [0], [1] ...; multi gpus: [0,1] diff --git a/datasets/FUDAN/FUDAN.py b/datasets/FUDAN/FUDAN.py new file mode 100644 index 0000000..57c5ad1 --- /dev/null +++ b/datasets/FUDAN/FUDAN.py @@ -0,0 +1,68 @@ +import numpy as np +import os +import random +from scipy import io as sio +import sys +import torch +from torch.utils import data +from PIL import Image, ImageOps + +import pandas as 
class Fudan(data.Dataset):
    """Crowd-counting dataset over a Fudan-style directory layout.

    Expects ``<data_path>/img`` to hold the image files and
    ``<data_path>/den`` to hold matching ``<stem>.csv`` density maps
    (one CSV value per pixel, no header).
    """

    def __init__(
        self,
        data_path,
        mode,
        main_transform=None,
        img_transform=None,
        gt_transform=None,
    ):
        # NOTE(review): `mode` is accepted for interface compatibility but
        # is not read anywhere in this class.
        self.img_path = data_path + "/img"
        self.gt_path = data_path + "/den"
        listing = os.listdir(self.img_path)
        self.data_files = [
            name
            for name in listing
            if os.path.isfile(os.path.join(self.img_path, name))
        ]
        self.num_samples = len(self.data_files)
        self.main_transform = main_transform
        self.img_transform = img_transform
        self.gt_transform = gt_transform

    def __getitem__(self, index):
        """Return the transformed (image, density-map) pair at `index`."""
        img, den = self.read_image_and_gt(self.data_files[index])
        # Joint transform first (operates on the pair), then per-item ones.
        if self.main_transform is not None:
            img, den = self.main_transform(img, den)
        if self.img_transform is not None:
            img = self.img_transform(img)
        if self.gt_transform is not None:
            den = self.gt_transform(den)
        return img, den

    def __len__(self):
        return self.num_samples

    def read_image_and_gt(self, fname):
        """Load image `fname` (forced to RGB) and its CSV density map.

        The density map is returned as a float32 PIL image so that joint
        PIL-based transforms can be applied to it alongside the image.
        """
        img = Image.open(os.path.join(self.img_path, fname))
        if img.mode == "L":
            # Grayscale input: replicate to three channels.
            img = img.convert("RGB")
        stem, _ = os.path.splitext(fname)
        csv_path = os.path.join(self.gt_path, stem + ".csv")
        raw = pd.read_csv(csv_path, sep=",", header=None).values
        den = Image.fromarray(raw.astype(np.float32, copy=False))
        return img, den

    def get_num_samples(self):
        return self.num_samples
def loading_data():
    """Build the train/val DataLoaders for the Fudan dataset.

    Returns:
        train_loader: shuffled loader over ``<DATA_PATH>/train`` with a
            joint random-horizontal-flip augmentation on (image, density).
        val_loader: loader over ``<DATA_PATH>/test`` with no joint
            augmentation (full images).
        restore_transform: de-normalizes an image tensor and converts it
            back to a PIL image, for visualization.
    """
    mean_std = cfg_data.MEAN_STD
    log_para = cfg_data.LOG_PARA

    # Joint (image, density) augmentation is applied to training data only.
    train_main_transform = own_transforms.Compose(
        [
            # own_transforms.RandomCrop(cfg_data.TRAIN_SIZE),
            own_transforms.RandomHorizontallyFlip()
        ]
    )
    # Validation runs on full, un-augmented images. (The original code
    # built a RandomCrop Compose here and immediately overwrote it with
    # None; that dead assignment has been removed.)
    val_main_transform = None

    img_transform = standard_transforms.Compose(
        [standard_transforms.ToTensor(), standard_transforms.Normalize(*mean_std)]
    )
    # Density maps are scaled by LOG_PARA so their values are in a range
    # friendlier to regression losses.
    gt_transform = standard_transforms.Compose(
        [own_transforms.LabelNormalize(log_para)]
    )
    restore_transform = standard_transforms.Compose(
        [own_transforms.DeNormalize(*mean_std), standard_transforms.ToPILImage()]
    )

    train_set = Fudan(
        cfg_data.DATA_PATH + "/train",
        "train",
        main_transform=train_main_transform,
        img_transform=img_transform,
        gt_transform=gt_transform,
    )
    train_loader = DataLoader(
        train_set,
        batch_size=cfg_data.TRAIN_BATCH_SIZE,
        num_workers=8,
        shuffle=True,
        drop_last=True,
    )

    val_set = Fudan(
        cfg_data.DATA_PATH + "/test",
        "test",
        main_transform=val_main_transform,
        img_transform=img_transform,
        gt_transform=gt_transform,
    )
    val_loader = DataLoader(
        val_set,
        batch_size=cfg_data.VAL_BATCH_SIZE,
        num_workers=8,
        shuffle=True,
        drop_last=False,
    )

    return train_loader, val_loader, restore_transform
matplotlib import pyplot as plt\n", + "from sortedcontainers import SortedDict\n", + "from scipy.ndimage import gaussian_filter \n", + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [], + "source": [ + "def get_img_pathes(path_sets):\n", + " \"\"\"\n", + " Return all images from all pathes in 'path_sets'\n", + " \"\"\"\n", + " img_pathes = []\n", + " for path in path_sets:\n", + " for img_path in glob.glob(os.path.join(path, '*.jpg')):\n", + " img_pathes.append(img_path)\n", + " return img_pathes\n", + "\n", + "\n", + "def save_computed_density(density_map, out_path):\n", + " \"\"\"\n", + " Save density map to h5py format\n", + " \"\"\"\n", + " with h5py.File(out_path, 'w') as hf:\n", + " hf['density'] = density_map\n", + " \n", + "\n", + "def compute_sigma(gt_count, distance=None, min_sigma=1, method=1, fixed_sigma=15):\n", + " \"\"\"\n", + " Compute sigma for gaussian kernel with different methods :\n", + " * method = 1 : sigma = (sum of distance to 3 nearest neighbors) / 10\n", + " * method = 2 : sigma = distance to nearest neighbor\n", + " * method = 3 : sigma = fixed value\n", + " ** if sigma lower than threshold 'min_sigma', then 'min_sigma' will be used\n", + " ** in case of one point on the image sigma = 'fixed_sigma'\n", + " \"\"\" \n", + " if gt_count > 1 and distance is not None:\n", + " if method == 1:\n", + " sigma = np.mean(distance[1:4])*0.1\n", + " elif method == 2:\n", + " sigma = distance[1]\n", + " elif method == 3:\n", + " sigma = fixed_sigma\n", + " else:\n", + " sigma = fixed_sigma\n", + " if sigma < min_sigma:\n", + " sigma = min_sigma\n", + " return sigma\n", + "\n", + "\n", + "def find_closest_key(sorted_dict, key):\n", + " \"\"\"\n", + " Find closest key in sorted_dict to 'key'\n", + " \"\"\"\n", + " keys = list(islice(sorted_dict.irange(minimum=key), 1))\n", + " keys.extend(islice(sorted_dict.irange(maximum=key, reverse=True), 1))\n", + " return min(keys, key=lambda k: 
abs(key - k))\n", + "\n", + "\n", + "def gaussian_filter_density(non_zero_points, map_h, map_w, distances=None, kernels_dict=None, min_sigma=2, method=1, const_sigma=15):\n", + " \"\"\"\n", + " Fast gaussian filter implementation : using precomputed distances and kernels\n", + " \"\"\"\n", + " gt_count = non_zero_points.shape[0]\n", + " density_map = np.zeros((map_h, map_w), dtype=np.float32)\n", + "\n", + " for i in range(gt_count):\n", + " point_y, point_x = non_zero_points[i]\n", + " sigma = compute_sigma(gt_count, distances[i], min_sigma=min_sigma, method=method, fixed_sigma=const_sigma)\n", + " closest_sigma = find_closest_key(kernels_dict, sigma)\n", + " kernel = kernels_dict[closest_sigma]\n", + " full_kernel_size = kernel.shape[0]\n", + " kernel_size = full_kernel_size // 2\n", + "\n", + " min_img_x = max(0, point_x-kernel_size)\n", + " min_img_y = max(0, point_y-kernel_size)\n", + " max_img_x = min(point_x+kernel_size+1, map_h - 1)\n", + " max_img_y = min(point_y+kernel_size+1, map_w - 1)\n", + "\n", + " kernel_x_min = kernel_size - point_x if point_x <= kernel_size else 0\n", + " kernel_y_min = kernel_size - point_y if point_y <= kernel_size else 0\n", + " kernel_x_max = kernel_x_min + max_img_x - min_img_x\n", + " kernel_y_max = kernel_y_min + max_img_y - min_img_y\n", + "\n", + " density_map[min_img_x:max_img_x, min_img_y:max_img_y] += kernel[kernel_x_min:kernel_x_max, kernel_y_min:kernel_y_max]\n", + " return density_map\n", + "\n", + "\n", + "def get_gt_dots(mat_path, img_height, img_width):\n", + " \"\"\"\n", + " Load Matlab file with ground truth labels and save it to numpy array.\n", + " ** cliping is needed to prevent going out of the array\n", + " \"\"\"\n", + " mat = io.loadmat(mat_path)\n", + " gt = mat[\"image_info\"][0,0][0,0][0].astype(np.float32).round().astype(int)\n", + " gt[:,0] = gt[:,0].clip(0, img_width - 1)\n", + " gt[:,1] = gt[:,1].clip(0, img_height - 1)\n", + " return gt\n", + "\n", + "\n", + "def set_circles_on_img(image, 
bbox_list, circle_size=2):\n", + " \"\"\"\n", + " Set circles on images at centers of bboxes in bbox_list\n", + " \"\"\"\n", + " for bbox in bbox_list:\n", + " cv2.circle(image, (bbox[0], bbox[1]), circle_size, (255, 0, 0), -1)\n", + " return image\n", + "\n", + "\n", + "def generate_gaussian_kernels(out_kernels_path='gaussian_kernels.pkl', round_decimals = 3, sigma_threshold = 4, sigma_min=0, sigma_max=20, num_sigmas=801):\n", + " \"\"\"\n", + " Computing gaussian filter kernel for sigmas in linspace(sigma_min, sigma_max, num_sigmas) and saving \n", + " them to dict. \n", + " \"\"\"\n", + " kernels_dict = dict()\n", + " sigma_space = np.linspace(sigma_min, sigma_max, num_sigmas)\n", + " for sigma in tqdm(sigma_space):\n", + " sigma = np.round(sigma, decimals=round_decimals) \n", + " kernel_size = np.ceil(sigma*sigma_threshold).astype(np.intc)\n", + "\n", + " img_shape = (kernel_size*2+1, kernel_size*2+1)\n", + " img_center = (img_shape[0]//2, img_shape[1]//2)\n", + "\n", + " arr = np.zeros(img_shape)\n", + " arr[img_center] = 1\n", + "\n", + " arr = scipy.ndimage.filters.gaussian_filter(arr, sigma, mode='constant') \n", + " kernel = arr / arr.sum()\n", + " kernels_dict[sigma] = kernel\n", + " \n", + " print(f'Computed {len(sigma_space)} gaussian kernels. 
Saving them to {out_kernels_path}')\n", + "\n", + " with open(out_kernels_path, 'wb') as f:\n", + " pickle.dump(kernels_dict, f)\n", + " \n", + " \n", + "def compute_distances(out_dist_path='distances_dict.pkl', root_path='./', n_neighbors = 4, leafsize=1024):\n", + " distances_dict = dict()\n", + " full_img_pathes = glob.glob(f'{root_path}/images/*.jpg')\n", + "\n", + " for full_img_path in tqdm(full_img_pathes):\n", + " mat_path = full_img_path.replace('.jpg','.mat').replace('images','ground-truth').replace('img','gt_img')\n", + "\n", + " img = plt.imread(full_img_path)\n", + " non_zero_points = get_gt_dots(mat_path, *img.shape[0:2])\n", + "\n", + " tree = scipy.spatial.KDTree(non_zero_points.copy(), leafsize=leafsize) # build kdtree\n", + " distances, _ = tree.query(non_zero_points, k=n_neighbors) # query kdtree\n", + "\n", + " distances_dict[full_img_path] = distances\n", + " \n", + " print(f'Distances computed for {len(full_img_pathes)}. Saving them to {out_dist_path}')\n", + "\n", + " with open(out_dist_path, 'wb') as f:\n", + " pickle.dump(distances_dict, f)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/var/folders/wc/w_kk2yzn49s52czst7nrbxyw0000gn/T/ipykernel_11301/2387218124.py:109: TqdmDeprecationWarning: This function will be removed in tqdm==5.0.0\n", + "Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook`\n", + " for sigma in tqdm_notebook(sigma_space):\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "72a0558118d14ea3a3e615bbe86944b6", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + " 0%| | 0/801 [00:00 39\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mopen\u001b[39;49m(file_like, mode), \u001b[39mTrue\u001b[39;00m\n\u001b[1;32m 40\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mOSError\u001b[39;00m \u001b[39mas\u001b[39;00m e:\n\u001b[1;32m 41\u001b[0m 
\u001b[39m# Probably \"not found\"\u001b[39;00m\n", + "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: './FUDAN-UCC/part1/ground-truth/63.mat'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mFileNotFoundError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[26], line 5\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[39m# uncomment to generate and save dict with distances \u001b[39;00m\n\u001b[1;32m 4\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39mnot\u001b[39;00m os\u001b[39m.\u001b[39mpath\u001b[39m.\u001b[39misfile(precomputed_distances_path):\n\u001b[0;32m----> 5\u001b[0m compute_distances(out_dist_path\u001b[39m=\u001b[39;49mprecomputed_distances_path, root_path\u001b[39m=\u001b[39;49m\u001b[39m'\u001b[39;49m\u001b[39m./FUDAN-UCC/part1\u001b[39;49m\u001b[39m'\u001b[39;49m)\n\u001b[1;32m 7\u001b[0m \u001b[39mwith\u001b[39;00m \u001b[39mopen\u001b[39m(precomputed_distances_path, \u001b[39m'\u001b[39m\u001b[39mrb\u001b[39m\u001b[39m'\u001b[39m) \u001b[39mas\u001b[39;00m f:\n\u001b[1;32m 8\u001b[0m distances_dict \u001b[39m=\u001b[39m pickle\u001b[39m.\u001b[39mload(f)\n", + "Cell \u001b[0;32mIn[25], line 137\u001b[0m, in \u001b[0;36mcompute_distances\u001b[0;34m(out_dist_path, root_path, n_neighbors, leafsize)\u001b[0m\n\u001b[1;32m 134\u001b[0m mat_path \u001b[39m=\u001b[39m full_img_path\u001b[39m.\u001b[39mreplace(\u001b[39m'\u001b[39m\u001b[39m.jpg\u001b[39m\u001b[39m'\u001b[39m,\u001b[39m'\u001b[39m\u001b[39m.mat\u001b[39m\u001b[39m'\u001b[39m)\u001b[39m.\u001b[39mreplace(\u001b[39m'\u001b[39m\u001b[39mimages\u001b[39m\u001b[39m'\u001b[39m,\u001b[39m'\u001b[39m\u001b[39mground-truth\u001b[39m\u001b[39m'\u001b[39m)\u001b[39m.\u001b[39mreplace(\u001b[39m'\u001b[39m\u001b[39mimg\u001b[39m\u001b[39m'\u001b[39m,\u001b[39m'\u001b[39m\u001b[39mgt_img\u001b[39m\u001b[39m'\u001b[39m)\n\u001b[1;32m 136\u001b[0m img \u001b[39m=\u001b[39m 
plt\u001b[39m.\u001b[39mimread(full_img_path)\n\u001b[0;32m--> 137\u001b[0m non_zero_points \u001b[39m=\u001b[39m get_gt_dots(mat_path, \u001b[39m*\u001b[39;49mimg\u001b[39m.\u001b[39;49mshape[\u001b[39m0\u001b[39;49m:\u001b[39m2\u001b[39;49m])\n\u001b[1;32m 139\u001b[0m tree \u001b[39m=\u001b[39m scipy\u001b[39m.\u001b[39mspatial\u001b[39m.\u001b[39mKDTree(non_zero_points\u001b[39m.\u001b[39mcopy(), leafsize\u001b[39m=\u001b[39mleafsize) \u001b[39m# build kdtree\u001b[39;00m\n\u001b[1;32m 140\u001b[0m distances, _ \u001b[39m=\u001b[39m tree\u001b[39m.\u001b[39mquery(non_zero_points, k\u001b[39m=\u001b[39mn_neighbors) \u001b[39m# query kdtree\u001b[39;00m\n", + "Cell \u001b[0;32mIn[25], line 86\u001b[0m, in \u001b[0;36mget_gt_dots\u001b[0;34m(mat_path, img_height, img_width)\u001b[0m\n\u001b[1;32m 81\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mget_gt_dots\u001b[39m(mat_path, img_height, img_width):\n\u001b[1;32m 82\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 83\u001b[0m \u001b[39m Load Matlab file with ground truth labels and save it to numpy array.\u001b[39;00m\n\u001b[1;32m 84\u001b[0m \u001b[39m ** cliping is needed to prevent going out of the array\u001b[39;00m\n\u001b[1;32m 85\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[0;32m---> 86\u001b[0m mat \u001b[39m=\u001b[39m io\u001b[39m.\u001b[39;49mloadmat(mat_path)\n\u001b[1;32m 87\u001b[0m gt \u001b[39m=\u001b[39m mat[\u001b[39m\"\u001b[39m\u001b[39mimage_info\u001b[39m\u001b[39m\"\u001b[39m][\u001b[39m0\u001b[39m,\u001b[39m0\u001b[39m][\u001b[39m0\u001b[39m,\u001b[39m0\u001b[39m][\u001b[39m0\u001b[39m]\u001b[39m.\u001b[39mastype(np\u001b[39m.\u001b[39mfloat32)\u001b[39m.\u001b[39mround()\u001b[39m.\u001b[39mastype(\u001b[39mint\u001b[39m)\n\u001b[1;32m 88\u001b[0m gt[:,\u001b[39m0\u001b[39m] \u001b[39m=\u001b[39m gt[:,\u001b[39m0\u001b[39m]\u001b[39m.\u001b[39mclip(\u001b[39m0\u001b[39m, img_width \u001b[39m-\u001b[39m \u001b[39m1\u001b[39m)\n", + "File 
\u001b[0;32m~/Desktop/MAP583/dldiy/lib/python3.9/site-packages/scipy/io/matlab/_mio.py:225\u001b[0m, in \u001b[0;36mloadmat\u001b[0;34m(file_name, mdict, appendmat, **kwargs)\u001b[0m\n\u001b[1;32m 88\u001b[0m \u001b[39m\u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 89\u001b[0m \u001b[39mLoad MATLAB file.\u001b[39;00m\n\u001b[1;32m 90\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 222\u001b[0m \u001b[39m 3.14159265+3.14159265j])\u001b[39;00m\n\u001b[1;32m 223\u001b[0m \u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 224\u001b[0m variable_names \u001b[39m=\u001b[39m kwargs\u001b[39m.\u001b[39mpop(\u001b[39m'\u001b[39m\u001b[39mvariable_names\u001b[39m\u001b[39m'\u001b[39m, \u001b[39mNone\u001b[39;00m)\n\u001b[0;32m--> 225\u001b[0m \u001b[39mwith\u001b[39;00m _open_file_context(file_name, appendmat) \u001b[39mas\u001b[39;00m f:\n\u001b[1;32m 226\u001b[0m MR, _ \u001b[39m=\u001b[39m mat_reader_factory(f, \u001b[39m*\u001b[39m\u001b[39m*\u001b[39mkwargs)\n\u001b[1;32m 227\u001b[0m matfile_dict \u001b[39m=\u001b[39m MR\u001b[39m.\u001b[39mget_variables(variable_names)\n", + "File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.9/lib/python3.9/contextlib.py:117\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__enter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[39mdel\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39margs, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mkwds, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mfunc\n\u001b[1;32m 116\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[0;32m--> 117\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mnext\u001b[39;49m(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mgen)\n\u001b[1;32m 118\u001b[0m \u001b[39mexcept\u001b[39;00m \u001b[39mStopIteration\u001b[39;00m:\n\u001b[1;32m 119\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mRuntimeError\u001b[39;00m(\u001b[39m\"\u001b[39m\u001b[39mgenerator didn\u001b[39m\u001b[39m'\u001b[39m\u001b[39mt 
yield\u001b[39m\u001b[39m\"\u001b[39m) \u001b[39mfrom\u001b[39;00m \u001b[39mNone\u001b[39m\n", + "File \u001b[0;32m~/Desktop/MAP583/dldiy/lib/python3.9/site-packages/scipy/io/matlab/_mio.py:17\u001b[0m, in \u001b[0;36m_open_file_context\u001b[0;34m(file_like, appendmat, mode)\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[39m@contextmanager\u001b[39m\n\u001b[1;32m 16\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_open_file_context\u001b[39m(file_like, appendmat, mode\u001b[39m=\u001b[39m\u001b[39m'\u001b[39m\u001b[39mrb\u001b[39m\u001b[39m'\u001b[39m):\n\u001b[0;32m---> 17\u001b[0m f, opened \u001b[39m=\u001b[39m _open_file(file_like, appendmat, mode)\n\u001b[1;32m 18\u001b[0m \u001b[39mtry\u001b[39;00m:\n\u001b[1;32m 19\u001b[0m \u001b[39myield\u001b[39;00m f\n", + "File \u001b[0;32m~/Desktop/MAP583/dldiy/lib/python3.9/site-packages/scipy/io/matlab/_mio.py:45\u001b[0m, in \u001b[0;36m_open_file\u001b[0;34m(file_like, appendmat, mode)\u001b[0m\n\u001b[1;32m 43\u001b[0m \u001b[39mif\u001b[39;00m appendmat \u001b[39mand\u001b[39;00m \u001b[39mnot\u001b[39;00m file_like\u001b[39m.\u001b[39mendswith(\u001b[39m'\u001b[39m\u001b[39m.mat\u001b[39m\u001b[39m'\u001b[39m):\n\u001b[1;32m 44\u001b[0m file_like \u001b[39m+\u001b[39m\u001b[39m=\u001b[39m \u001b[39m'\u001b[39m\u001b[39m.mat\u001b[39m\u001b[39m'\u001b[39m\n\u001b[0;32m---> 45\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mopen\u001b[39;49m(file_like, mode), \u001b[39mTrue\u001b[39;00m\n\u001b[1;32m 46\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 47\u001b[0m \u001b[39mraise\u001b[39;00m \u001b[39mOSError\u001b[39;00m(\n\u001b[1;32m 48\u001b[0m \u001b[39m'\u001b[39m\u001b[39mReader needs file name or open file-like object\u001b[39m\u001b[39m'\u001b[39m\n\u001b[1;32m 49\u001b[0m ) \u001b[39mfrom\u001b[39;00m \u001b[39me\u001b[39;00m\n", + "\u001b[0;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: './FUDAN-UCC/part1/ground-truth/63.mat'" + ] + } + ], + "source": [ + "precomputed_distances_path 
= 'distances_dict.pkl'\n", + "\n", + "# uncomment to generate and save dict with distances \n", + "if not os.path.isfile(precomputed_distances_path):\n", + " compute_distances(out_dist_path=precomputed_distances_path, root_path='./FUDAN-UCC/part1')\n", + " \n", + "with open(precomputed_distances_path, 'rb') as f:\n", + " distances_dict = pickle.load(f)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "6b9af7f918e140e0a8b931c65e841c32", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(IntProgress(value=0, max=482), HTML(value='')))" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# generate GT for part 1\n", + "data_root = './FUDAN-UCC/part1/'\n", + "img_pathes = glob.glob(f'{data_root}/images/*.jpg')\n", + "map_out_folder = 'maps_adaptive_kernel/'\n", + "min_sigma = 2 ## can be set 0\n", + "method = 1\n", + "\n", + "for full_img_path in tqdm(img_pathes):\n", + " data_folder, img_path = full_img_path.split('images')\n", + " mat_path = full_img_path.replace('.jpg','.mat').replace('images','ground-truth').replace('IMG_','GT_IMG_')\n", + " \n", + " # load img and map\n", + " img = Image.open(full_img_path)\n", + " width, height = img.size\n", + " gt_points = get_gt_dots(mat_path, height, width)\n", + " \n", + " distances = distances_dict[full_img_path]\n", + " density_map = gaussian_filter_density(gt_points, height, width, distances, kernels_dict, min_sigma=min_sigma, method=method)\n", + " \n", + " curr_map_out_folder = data_folder + map_out_folder\n", + " gt_out_path = curr_map_out_folder + img_path.strip('/').replace('.jpg', '.h5')\n", + " \n", + "# #plt.imshow(img)\n", + "# plt.imshow(density_map, alpha=1)\n", + "# plt.show()\n", + "# break\n", + " if not os.path.isdir(curr_map_out_folder):\n", + 
" print('creating ' + curr_map_out_folder)\n", + " os.makedirs(curr_map_out_folder)\n", + " save_computed_density(density_map, gt_out_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "61b63f97f03146bba6907cddfda42e04", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(IntProgress(value=0, max=716), HTML(value='')))" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# generate GT for part 2\n", + "data_root = './FUDAN-UCC/part2/'\n", + "img_pathes = glob.glob(f'{data_root}/images/*.jpg')\n", + "map_out_folder = 'maps_fixed_kernel/'\n", + "min_sigma = 2\n", + "method = 3\n", + "const_sigma=15\n", + "\n", + "for full_img_path in tqdm(img_pathes):\n", + " data_folder, img_path = full_img_path.split('images')\n", + " mat_path = full_img_path.replace('.jpg','.mat').replace('images','ground-truth').replace('IMG_','GT_IMG_')\n", + " \n", + " # load img and map\n", + " img = Image.open(full_img_path)\n", + " width, height = img.size\n", + " gt_points = get_gt_dots(mat_path, height, width)\n", + " \n", + " distances = distances_dict[full_img_path]\n", + " density_map = gaussian_filter_density(gt_points, height, width, distances, kernels_dict, min_sigma=min_sigma, method=method,const_sigma=const_sigma)\n", + " \n", + " curr_map_out_folder = data_folder + map_out_folder\n", + " gt_out_path = curr_map_out_folder + img_path.strip('/').replace('.jpg', '.h5')\n", + " \n", + "# #plt.imshow(img)\n", + "# plt.imshow(density_map, alpha=1)\n", + "# plt.show()\n", + "# break\n", + " if not os.path.isdir(curr_map_out_folder):\n", + " print('creating ' + curr_map_out_folder)\n", + " os.makedirs(curr_map_out_folder)\n", + " save_computed_density(density_map, gt_out_path)" + ] + }, + { + "cell_type": "code", + 
"execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVAAAAD8CAYAAAAhQfz4AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4wLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvqOYd8AAAIABJREFUeJzsvUuPJdl17/dba+8dEeeczJOPencXm2y2KFGSZVg2cO0vYMCABx768QHuyB/An+UOPDbg4QUu4IlhwDY0oEyRFB9XJJtssruru6orn+cVEfuxPNhxTmZLurTYEKE2kKtRlZXZmedE7Nh7Pf7//1opZsaDPdiDPdiD/f6m/9IX8GAP9mAP9v9Xe3CgD/ZgD/ZgX9EeHOiDPdiDPdhXtAcH+mAP9mAP9hXtwYE+2IM92IN9RXtwoA/2YA/2YF/R/iAOVET+KxH5OxH5pYj8T3+I93iwB3uwB/uXNvnn1oGKiAN+DvyXwCfA94D/3sx++s/6Rg/2YA/2YP/C9ofIQP8V8Esz+5WZjcD/Avw3f4D3ebAHe7AH+xc1/wd4zXeBj+99/gnwn//Oi1jMbfnOczbrDWSQAppAMkguSDawgpmhCDVnvsuczYR9Ji0imIGoQ5xDnSAq5H6YfvLLGbfd+6uUgvyuCxVQkek965ecqzGolIJzDhXFeY9q/XpKib7vv/wy02uICFaMpgmEpvnyRYlRcgaMcYgg0HUdfb+j8R71ns22n67F6nXJ3e3JvRv5UpEhdx/sH/2Gw0UCoM7V6xWlyPR1A/TuDUop0z8MzBADswJWENHD65dSn6NTEFFirj/nVUAEESVbwRDUe8Q7shjaBLIkxEMRo6jUyzMo2ZAsKEqDowyJRhQxQ4pRSkZFySkxxPgPbnW/TjJdg6qr9yZa0wsRTO+t7fRRDMQMDNIYwQyvDjCsZMzqc8EMdQ7vHWZGKUZK6fAwDKv7RhVUKAimYE4xAXNSP8rdhUsBCjgDsuHMaENAMAQBM9brDV3b1GuZNpW6wJgy6hwAef+8pJ4fM8Op1s/v7Zeyv28BQep+mO4d6jqbWV2a6Z6njXW3ZtOL5ZRBwDkPpeCcknPGprUo0/XUZ1LPUjHDnJIdmArmwJQvpX9igpS6NtVv1OuilMO+PFzPdG1md2fRzNiUm7dm9uQfHob/sP0hHOg/yUTkXwP/GsCfHHP63/7XnEYHN0azdTRXhWZVaK4G/LqHcYQx4tg/PAPLmCnDKJgoOCWPA14Uc45ueULz7AW+jNy8fY3frZCSgbsHbYCVQhMC280Gy5m7UyKHDSBA1zWE4BnHWM+XKm0bcE7RaSN07Zyc4YNvfwez+lrf+973SCljUg+e90rwDuc82+2O9957lydPnhGaju12zXx2NC0S/PgH38fCij/57l/QzE4pcUR95rPffsTx+VN+/stfE4eBJvjDBjczgnfEHOm6GZtNXx3FFADU3R0AmzZVMZv2vSCqiDpMjOPTMyJGCYFN41k8Osc3nuyMJAaqUMDGSLndYduBNhbSbkcZepaLObv1hhQTm82OmRfmWtflYpPJwHmnqG8w17B48pjiHP7klN4Dj494ayvSSaF7qtzoyHrmGIYtJ7NjzrszVr+9xW4LH8xf4N6sObodaQu4fkDJ7G7XlGj89MMP8SEQU5qea71X9Q7XBHDK7PQEnXfI0Ywyb8jLQG6U0kAmY8XwWdCh0PagfWH1yRvoRx7N56TtFmeFnBOU6hbEKbOjBWJwe7vh6uqmOjoVfNtiAqdPHiOzho0acdbQvnfO27winXpyBzsZKBRa18ImEXZCewvuZuQsJf705fvMzGFj4q//6nskzZQygDiWZwtOHp0yW57x8cU1rpsxlkjM1ZmrCpoNNeFo3kyBzChOKI1jF0dK6
ylSmM1nSDY0F9xo6JiRzYDPhbLbYTFBSljO034qdZ9ZQdVzu9pgCIv5jCyC5kjJhd2YKCVToK7X8YJ23mGNwx0fc9sI/XlLXArr40ycFcLS8fjRGa9evaW1DrdWmjV014WwyoTbEd2NsOshJigFK+Xg4NMYa4BVxbLxV+t/+5vf14/9IRzop8A37n3+cvral8zM/g3wbwDaF89Mk8N6o5MWSkFyjRw2RTdFKLkwloylKQRbARSzDhPDrNA0nrTbQoZ0m+hCod/uePr8BW8+usFLmdbP9tcBwDiOVA8kh2jKvUBarJBSQrRGSpmy3lIKoanRX6WGxHbKJmXyWqpaw+LdvSMiNeOV+v/7YcCHjsvLS2YvF/VaDN7/9ge8fvNjfAOIIr7h+vozgg8cLZdYKRTbR25lunJiyogqu91Q76neRM0isUMGZ3snus8sqNmhOEVUGUsmzGeUbkbRwtpGLEeKAz/vUK+1YhgDaRzR4kklQvCQHCC0baBrGvp+xImhOnlzwDvFhQZEJ2deF71tGz765GPeeXRE2Q24hSduRkooxCJ47fjmi/c5X5zzo5//Na15cj8ydx4vie36lqVvaH1D8oEixvFyyXq9ZrpJANTVLAcRzHkSQkyZnEdSLoxxJDvIBYpWh+gRSJl58YRkWOPRVEAdGjxlHCnolIFD8A6zglldY6FmuKpan58KMSdcCRSvJC3EcQsLR2mF0hnJMr7zxJjQVJCsqBiucTRdIHRzGDLFEo8evcOrV5/QdMdES6yTI/YZyzfkWUPpHGPOZO9Io+CkOpCunWNqlFzIGOaE2Aix85TGMZSELkCK4C1gveEHh+RCGTOSm5rJ5lwDay6HM6SiiGgN1CXThMDN7S1qhpVSM/rprM1mc0SEzXbL0fE5WWEkM4oRi2FiuKD4oCSLqBdKARWj7LNU3Wfu9RmXXLNuUZn2vRHmnhRjPY/61bigP4QD/R7wHRF5n+o4/zvgf/hdP6DmaPoW242cuI5x2GKj4cca6Szn6kynsqxmjlOpiKGqFBHECikOCAnLRh4y3dhwe/2W9cUl2nhgKjfv1bhy+MsoBxf0ZduXNSK1ZA1tQ9fVjDEEj/OOpg2UXHBtXdaDc5scpVOlTGVrmcqlO6vvG2OcoIaa9s6P5rzTvI/6pn6PCqfnT+GkRu6//E//sma4OaP1aE73I9g+2E7rdbhNqDtret/Dv4V9EVhLWOcoquTg0UVL0UTqBO2U2Bq7dsQFD9mIOdIshEYCuRiSC+oDbt7hBqGMuToNr8iUKXtfiMUoVrP5fYagwCe/+S0v33mOjZmyjjRHAWk9wRzNmHny7DGPm8foFj549j6f/uRXDHlFud4xdx2kxLYfCfM5b9+84dmz53z729/mRz/60aGq3AccUaWoA1EGAQtKmnniQhmPhdQJsc1IEGa+w49GDJH+Vkgp0XQeSXCxWsE4UoYByYalhFB4+c2XZArOhF0/1vcUrdmWCKhiqsSSKapI49mVyGgwmFBEsFDAT0HfA17JGilOePTkHPUBRsN55fTRYz77/FOyCj4syI2jl0BpHLOnx5RWWO8Svmvwbk6/2SFjpl00iEFOiUKhOOh9YfCOUTPWBtZNZuZnxD7TBMEaRyiFsk2EfVWXC6Q0Ocu7/XZ5fY2oo2kaVjc3nLQt3gdEhd0w0CwWrLc7trsByQFtA+N6oBShOVsQRcneEwKMNnB8dMyu39WqKxXE3MEn7A+Caa1GJQgHdydAMXIpaBNIY8SrwtXv8lL/uP2zO1AzSyLyPwL/G+CA/9nMfvK7fqbxDdwWjqTF7RKzrAgNwWU0FKQpmESsNFNWOmFIpZAL7MaEb1vSGO9SdKlO483VilGFJhViHnHOH7KxQ8k7XcchO+Afj0YCBweYUmaz2dA0DaqKAKvbFfP5guvra548fob3ipsyOzMj51yz2T0O5qrjUFViTNPrJkDvsEx1qM6AgJXpwIkHJyh1s5wsl8QYq+Pau8gJM67LUTikn
P8gPPz9+61Z+B77y8AQB8rgGI89zBRmQuogB2OVN7RdWw9KATXFdtVR4hyr7Q7bbZFYn8cwDgRnqDqcD4zjWKsMqdcoVJghx5HOe/oxo33GVhEVBw04Mm5WCBuQKNiqsLAG6SONKbeXVyxCoG0Clgs5FX772495fX1T8c19JKSGTFGtnztXD5xXcoAUILVGaiHNAJcxenDK+fNHXG0vkKAUL2hwNeDsg9b0LIyKq+ZiiCopJ8S5ur5Sn2cz60CEbIYLAbxH1VCFECpcok1HzQuFivgqPgSaBI8fP4Z1DcZXVxeE5gRTwfmABE/xDlm05LnA0mMzR2o8yRtmI+Id9NAHQ9RRopINsocdI6Vz6KJhfr5AFC5fXxGc8mi5JOSAN4eVHcNmR46R3NdSXktdW6eKEzlUXb5tKjew24JozXYFNuNAzIZrOnKB4JpaWWSHwxOLkIriTRExbFS2NwMuOyQJkkCSIameb5v2kjmtCYHIhN0L4pQSq7/wXYukzFexPwgGamb/Dvh3/9Tvdwm+vfwmq09eobtI1ywo44a8i1g/TpiKTTmaUR2M1ZLWKQWbSkBBJtzREBxGyYnWBbLLSDFcF2iahhwTRoUJUsmV4JjIEuG+c7WDUxWtJYgwbQTvEIExRrzziArOCU7rwQTH0PfVccLBf9X7qIcqtDOuLi540inDMQSbgsPeD5rhmxZoEO2pjywAHuMWMeNP/vgdfvq3v2Es9c6dmzDMXBAH3rta0k8bOGc7kGFfxiqmbExcLeMlkNUjviO2gTJ31XHOCzYXclvo2o5xlwlBGXOiCR25L3iDMkQkJohGiSNmhYyyzYmZGG1QxuLpU8HljAsNpo7rq2uePXnMsNpA1/B4cc5qfVshkjLw9NExi5XSXhkaI99oljTuFuKKst2yaDtaVfrbFWEx5+T0hA9/+xtCU0vMlDJ7PoyprK6eG8QJ2VenWDyYF1IwwlxIJTJGePH4FBsiJRjFGaYODVOgdHsCpAZb0VptaAXNa+XRBEzABU8RYbZYEGYBbT2xFWIAgiM3jlGNUYQoymaX8Ka4BJIKDcI8KBefXfF0cQ4588WrN7x4EWjatjqO4GDekuZKPBI2XSHPYXBGM4PNbkCL48WLc+LNltEcJQrFlOyMwQTmYLOCn2Wubi5hruQkfLFZsUwNJyEQZoH2+BjxAfMeYsJiwkqecMaAscKpY73ZAjDuoaSJ+C2A+A7TgFPIcWTnBKIQ11vMd3iB3grhrGWzW9NIQ44ZSYbuCq4X3AAS6wsWFXAeGsArxTtc29CnAfAISsyGDMPv4eHu7F+MRLpvGjNPmhMuLn7B827J+fIxrz+7xZdMKgnLaWL4KoMnDjChWEEwjo4X7IaRyqxZZUdF7hi9PUMP6JQd+OCASryE6ToObPLESJZSKKVmclYKpeRKGKkiKsxmHbvthsVigXee4JXglO6oISiA0Xbt38t0p2uaWOxcMohHRGhDoHH1OvYsPiLoVGZn3uLkEVigGGy3t+xWlxzPPH/+3ff46S8/pZir5ZdVAsFKIeUpG91juyIHoMKmbMymwLDPYBWdsjFPUShByY0whkKaCdoaOlciI2GunHaO3WjYoMQglZhwCkmxCQsWrddjU3ZgRiUOVAGhFOOzTz7FNw3HYwRfvy+0Da5PePXMEOYbaHNPc9tz+fkFT08e8a0nL/no0x8RNz29RrrFESkO5BRo5wuKuOkpywG9qVB4OeDg+/U+ACEThiZSmHcdQ4oky2yHRJsrblsomNTqwu9fdHIIqAJ1D8lezeAc4hTXBMT7uk5tzRQ1uKrqUECl4n25wgO3tze0TYvEgh8dPsGxC4Qhcb58jA2TB7JCimMlxxqPNAFmDWXukJkyNpnoMnlecCee2XzO5qbnOq05ebQg32ZyKZgo5qWStBLJFCwmZscN2/VARipei1CckkVwAk4U1GGaKVb3sVjFks0qHilSz0/K5e5BiODVYRRUaoVmJhQ3PYcxoH2i8XD2+
BzXdmzywNXNDR4H0dCdIdsEIxArbGYi0CgWlNI6UoDZyYyxr9mMGbgMuvvHgLv/b/taOFBJhdnlyH/y/p/z8c/+Dn+UGK6v0DTic30QliM+uJphTQdOtSEX44urNeIDzntyvIdG11qKMmVcBWEcBtq2qa8pe1mDfPkQcVdaV4BZMSvEsa9ypQnL3O12LE+WbDebyamCYjx/9pj1+pb50RJVR9t1pFiz3GIZbJKuTA84pchuWzizgveK6v566uaJww3Bg7oBYwd0iBZ2K7h8nZHTgea05clpw+sv1hTzCI5MwhC8V3Ip1THc3XLd0HeA4D2UtEpp6qeChIA4QZuAdkBnxLnRdNAKvH/+iO2bWxaPTrl8taVpGkTiIVCYVpzPu/r6TWjrQS8F7z1N8MQYGceIqWPWzUhjrOREKkjM6BiRIpzN54SbjG5WvLr8IcIMJwtIMNysaBBcqUG0azr6IXJ8dkbO5QCl7G9zj/VaKTVLMRAznIEr4IrgslCKkvtMG1q6RrDeSLuIJkOzHSRNNQDfkxsBJpMkKzj6fkTaFrxD2rbK7JpQWf9aVCBO8AXykFDxxCFym7a0M4d34IpDB6MZ4bvvvcfVbz7FZ4fGyE9+8CMaMa6ur1BXn2ehkl/FCdIp/qjhNt1SusIgPWHm6Eohj5EoCXMGoa5JxjCdlBYOEGOxCLS+IW6FtEsM48jV7Y5mlenWEdkO2GZDGUYsxspf7LN+EVLOe/UTXlwlZac9aegEoxk5jzWxyBlSQofEInXMpUE2iZw3tG3gvD3nszfXaHFIn9FBkF3CYiaZVQfaenIrpCNFThpe2xZbCgVDNSBDwW8dX8W+Fg4050x+fUtzOuNovsAPhbn3kCbtZvCVZc+ZlDJFKkFjGCkbFEcT6qFLdp8fupMi7QGAXArDMNxnU6pm9B42aPssccLkqi9TTPTAIFf9XuD2dsVsPmO13rA8WVZ8VZVd3yO+SkJUBBc8zqRiN/e0bmDklLFQU2vRytjKlDGVAiVm0B7RU2rpDoKjaRQjc3k1cHlxwQffeYf1qme9GcFq5lrx3sKeZDS+JC6omcaXeLWJRitV1aDsQROFVNCsuCK1PEoFc7DerHn+/nt8/rNL4pDwySrUOGUWznkKiVwyJSU650lmVEmgVGmVupoN5szYD5UcdJ5sxnYY6RYtljNeAzYYvk1sNre04QweR8qYcCXTaIVu+u2aJnSVZc254qrl/t1PKNCecCil/kmFMkY0eTQKbgSCZ/t2x8lJR3CeP/+TP+OH/9cPaLJCLGg0SNPPF+5sqjLEVXx0BLRtMFfLalMHXSB3DvEgru4dEfDmoTTE1ZbGB5zU9dECIcIsw8c//TkLccg8QsoE9QRVVustVu4Rg5MKI7SB5BWKcbw8ItuA2sisNZzzBFx1olOUTWOsTt0LZCGIJ/WZ4/kpb95eQlZKqqx9SYWcM1oKpdTTZqX+qYGlEmcxJfZSu7rd9hCKknOhcYEU41Qp1uBZEIiJtNkSjo+4/PQKmXmkcRjgx/q9PhmSQMt0XhWyE8YAeSYMcyN1I6kzbFafT8kJGQ3ffX1Y+N/bTITFm2sGyRwdL9ldXOMmfVYeR7JlSi6TQLncy6I8wQldA2YJKTtqDpgAwyRgJd/xJ4CR2fVbQgiIQcoR1f0yVIkEZe88BXH1wTOxpUyRXUxJFI5OjmiaBh8aRJWMQ6zw5OkzdkOafLiiVCkUE3EA9WObMogjegfboT7UUg5iZ1Xh7esV735jweWbN5w+elbTIxwn54/5/JNPsdJgwC9+8Ru+8d5LtpuPEQ339Mx2uP9yWIuJly/3y1cm+YmA5soixEgalNILzeCxIJQNldBqjIjw+eXI7evPOGeJi4kmC4EGDbGC8ypIcASpzqM3Azy46miHVEipgBizectmfYsfuyr78RVXttExDJFGAq0zJAXC6CjjBtv1KIX11YqwaJm1LeNupIRMN+uIMR5Y2UO1MVUnYlqVHVTnrTHjRge7Q
tCKoUkynp/MCWNDn4yLD29o18oTP+ft5gs0GjJWzK/K6/YibWNxvACvSNOgqVQn3gTsuCEr+EVLcjBvHalk1Kj7tygXn1zgtMJCIdbnptmm9xt49uQx1ifKkCjrGzyRk/PnvPnVRzVzdA5iwVKGojjx9EOhJGG92tF1VUWwCB3vf+OPuPz0hqv+EhkLTjxlk3FtFag3PjDeFHQxQ33Hd17+Cb/+m1+iQyFEIRRFS83EdVpeE5n225ez8v1m3GeiOlWUTqHkeJALVn4iEczDGDHg+vPXLE5P0dKQtgXvHPFmheJqEJsgPFPBWk9pHWXpiUfGsMzEs4wsEvOjAbVM03TkErh+dT/y/dPta+FAc4yMuy27jdC0Hck5Ouer41OlTCRyznnKALnTm4mwyIU4ZjY0xH6cNi/sU6v7GZ/g6uG3mlupVCB5rzSXqWzRiYiRqfyEKTfzlcBqQkDUmB0tatdH01Eskyl8/uYLnr/4JrP5HAr82Z//OT/8wQ/39FfNulQm9llIpVAkcLNaE9oZcYx0s3DYYG13xPXVivNHT9lse5rZjBy3dLMGLE/3Baqerp0RxNdk6FCm3kWQqkE0zOrX8r5U35c7ZohMGltLUDwhlXoQhwJeIAiuQB4hWyYl4Wp7zWymvHP6lMvXryjbEdePyBixMSIxT6/JlBnvO1r22YmQcsKpxzlHSVOwKa5mUTGRYuRqGDhuHY8eP0L6ntlizu7qDevtBssjcRCk64jjQD8WjpdLsjpyAZnu+YBh7LHpetpRMyRlJBesjzjxNM6RzfHk8TkxJnw0vrF8Tnc6Ur5Y0ZknjVWoLXsJzz67UmV5elodmfP0GMwacuuQo0AOIHNPESN1AU2CJUOSo99EyOCS4T24nGoTSYZQjLztWZUrzpan6Djwqw9/xdnZU3wzx1KCIPV6SsGy1Zg7ZvrrDdJCXidibuh0wbAzxoXw8sm32X2yIacRKeCGui/IQqHgWqV1gU9++1s66ZB1qsTNWHkMUq7az0Od82Wn9PdQsruvs68QD1GOvRzKeYeUQukHnBPKOpHU06SOrm0ZNhsetTOur244yPFUawUnAkGrlnYm5KOCW46cHm95d3HL89OW3bhilVte+TM++ae7rIN9LRyopUxvmbzaUkYjKvTbDQo4YWoHLHfl754IcHXzBRXiZkVJbsI6Fch3hMkhIwCnAhRePD/HgDhGckns+oFJu0/OIGEvrq6geMoZH5qqe5RCFmibgMzmzHyoj75Eggv0mxUEh5gHqcJr0bo98qGND2quUXWQXYExG+ePTljdbum62QFGOH/2Lj/50d8wm7XMuzkf/vpXqAvEOBI1TAGiEjI/+cWvD8xypTAAuxP53+Gc9TPH/Za2isnqHsIoBVKuesbBIVvFmeJMKMFIQUl4dkOiTZ642jGUkdmYq0OZyKJKniiWKyEjphSZMgWDYhmZ3nO72U7XV5sognhyyqQ81AMVHB6Hy4WGwkwL/eotXfD06xXExKPT04qvhkrYxDFixaYmCLmTwE5BNcVY8UjVSlj2gpfaneMLjNtC8ltcG/BDwp+MfHP5Dv/P9/9P8nZE+wgxYikeNMuiionVIBoCuQ2MO4hdoBwF4hxyK8xOPM7BzoyQPewSYfTcvlnjcNAPzDtP6neIQRDFmXE+WzATz+n8FKLRNnPGpLgMzdQh5oKSvVBiwpcZaTWirrAMc4acyesavAKej3/8Cd17HX/x8rv8+Ps/oMRMl4Q4GMUXvHXsbntWby5pi9KqoBtBt0boS+1IGiv2SK7JjU2ttEyEmk1wGfs9N+1FVTcRnHqo0GQK+pINm5QpeRhQ7+jM2F3d4I6OaNWxu7hChxGjKh3wfsLdBZxQWiF3hiyFF48j78+v+FdPLjgqtwxuwD1+j//9V1d8/yv4rq+HAzXjlsjpTWJ7Krz64g2LYerzPnTrgBUjTaB0sUJMGTPBAcHvCR+HmdZsw2Ra/OpwzWA2a3n27DEiGUFofEuxwNHRo
rLGKohvDiJynEOCZ9UPXF6vyCGwBxTH4FhLfUhNcMx8YN7OsLnni4vXPH32bSxX8PzR48dcXlzg3L4rpWY9okqOuepTc8Z5pWmaCb2sW+xqvSIBq9sNj5rZ1BZXO3huN9spo7apGwvMqoCfYgcBv0yllE6bU1QP5c7ELR02tCCQS8U9YwaNmHN451Gbss8g5KAMMVUiJRqaBuaLY9rFCVe3r9l34ph3mArqyiGz0D3mKIJaQUrFPGNM91QBezF6JviGpgm0bcBipL/dkOOAHi3YDVtOwjEmgmuqfCdRaFzDMPT8/O9+ztxpJRVgjwzXZyBQsuBVKXGsTnUioVwOOAS1TGgTxSJz9ZTPV7imhdWI9gOSEiWmKhubsHmhYp9FBA2eUQq5C5RjT5wrw7FROiGcCdtxC33m9PiEEgfmoeX8fMHN61vclG26lJGUcOqI/Y7F0VO8CjZG/u43F+QIjxeK18zyuMNUSM4wzYh4fAqMo+LTgKURCUbjFWegVvAJXv/4Q55++09p1sZu0xMmQgfvSOsty0XHGCNqgi8ZNhE3RtxQYCKNSDWQVOjdJgirBq0mtHvgaNppe+h535lnOHX4EGrmbBWHt1I1rZbBSuTy8zecP3rEsBow8fSbHqRi9+I8SIVl9iQeCnhBWmPZ9LzbXfMt+5TnnVKaTPafcnG84H/9Cr7ra+FA1TncrKHZjqwx3n33Ha4+vN5XdzjRGskAN8lBzIw2FARX5UZWCOIZYqp46CSVEPTAvKoqQx/57LM3tU/YwPsGcUpo/MSKelwYcV2LNIHQBrTznDxa8jaNWNtSpJYmrgkkpyxmRxy1Da2VWmJ55ZOPPubpy1CvNSW++a1vMQ4DMY5YLgfJlJnR70auLi45fucRabfh8noLt7csjo6JMdF2bcVC377Fe884DATn8OpwwjRowkD8QROL1XJbqHKuveV961yuB30/ROJAxu8VDs6hxXBS5Ug+FzTWwRwOyAnSULMtHTMl1vRdXOT64oqcCzNfcU4rdZBGnogBcqHkNGW3eeouY8861XWZ/tKJsLNSK4phF2m94/howdVFz3q1Ijd7uVpti/3bn/x7zk+OWB4dc3FxwdFiXgX1GWJO5KlDzDASE4xgRkkGEhErWC4Eg1nb0TkHtxu8C1gemT8Bt4vIELGYKtGRErLHmqbr985zu1qhJVJOj1icL+FI0SPBnQjlxlHVAAAgAElEQVR5JuQQ0dZTgrAaNjw5OSKvR168fMrN61taH7B+IFATiNhvISc+//gTvvNH34GcGSXQdkrJkWE7QEmo+nq4pT7LYTfiaGhKlQ9JULx3VZJfgH4keEVWI80I/XqowVcEfD1feZPwqtXJFaNsRnzJ2JBqBh7jpP00KLnKDqfST5iC9l4qOMFYopCGcdId1+fnJjmTiFByrpVhgSY0jOMalfoz210mtHPykBA1xE3chU1E6/0sd1KaiNQGBbVI3grzECAnTmX9lXzX18KBehGCFezJgkWrLJNSzs4Qy5Sc7w75PYZ9r9M0oUb/aJzNA6vbGxSHUsDqRJuqq6ylQ9O6aQBIUzOyqStEnKvtcCFA42HeUZrAtg3kICTp2Z45/EwRH2i8Y5UjjTOOzmYkDE/HQgPjbc98+4ysI76pfe7EntAYTdPWtrEJ8TFRttsN6ls+fXPFFxcXnD59l7PHT7hdr1nMZyxmLT60qHkuL99yEhwvP/iAmIXbdR2zmmsqdZAluVKzdA6+aYr9cjfJaI893mPY6sY79AvXzijBiLkgYwSnFO8xrcNbCiA54U2qV/UdjW/oGVkpqDp8M0OA4HwdoFMMGyo2GoceUqTEVIfmbHcH7a2KkC1XSVYxUh4JTWCMA5+9eY2KsLteMT9uuJEbMonjs1Oubm643e5Q+wLyiBNFqL3jjResookAFB+I4hiR2g1ldTBNARh72CrBe2wYCbNZ7azpe6wYcxU2MU4qh7rwe5JTvCBeaedzwskxer4kHTd0555+bvCso
cwLn7/+BGPL85dPWH8x4qxhd7NlTSLrnsg3bIx1mE42Tk+WzDtPE1pKFiQIi9kRbK94s7rAzxY4bTDvKKIU8cR+xCHMvCeW2ubcBCX1PUE9DJHl8YJ0cYvdbHG7iKVUpyaJHKZV1f9qRRC3Oyxnypgg55p55oqD7ln2Qu2DN63DcyTolJUyPWfIeYdzdb+FZoKkmEhUUST3lKIU9cyOnrBdXXF7vUZci3kI7Yw0bNBJqWM5T1PcSpWaRZBRYFCuh4bXvuVte0QuV/wX3/kOv/j1r1E//2q+6yv91D+zqQpPmwWI0TiHlMTx8ZKbq8sqgSkg5IqF3isBKvEBru1qtOnHadkLQqEIzBcztpsd+yLVe8dyeQTsWx0NcRNm4h1JqKPEPORQKAsldcJOEouzM9bjQFZj0HrY+lI4WTpSMUIT2PSZ5eyUjz76mLNxzbKds9rcMPOe88dnfPzrjydFQSHlTM4VLxtjpG0bsmSc94TgcK4Kqw2YzWfYdoWb7uPy1SvOXrykCY6YE4oesu1aMk2b0O4znvt/Tx+nz/cOC6aIvWfpbZLl5AmVFBDnKyGhQsIIzjPGaRCLVdeURBDvKMGRnWP09R6cULPOknHk2ppo00yD/UAJ9SgZlcqKh3Y2ZdA1q6lXJsRpDd20g/thIHjl7RdfgIC6mo2rOPaKnr1i6b5ZThUr9b5ioyogFZMLwWEpE3c9rgm0TcPQb6HfMewG4m7L2fER1zc3eBHSlDUhQs4ZR51tkHY9+RZiaei9Z8zGoIYeGSWPtEEIrjBrjDgYu5To80hwHon1GeRUqhMtmXeefnOaKFU7m45mgYvXr9hef8HjZ+eYwu2YIBvSNDiryUPbtsznbdWGmrG6uiFk0BRxxZiLZ4aSNlt0TFOVUOqaTF1VR0dHfPeDP+aHP/oxWqrDkpTujY2rz0dsP7SnrngIgUPSwD4xFXa73V0SM8mZ7idJOafqgM0oY0S6BeobctzhNZPGnuPjI1bDeoICpiw35yq7SzZNjRJkUFa7juuj5/xse813Hy944+Z8Xua8zYvfx2Ud7GvhQFPJjLfXNI9PyOtb0rzh+s0t69WOlGrrYbnHNu8Pe56kTjLpNaUUim9rOSVSs7I9BnNwHoK6gJvSfVWPaQHv0LZBZi3WzcnHnqEV8lnLroPFXHi9umQ9ZrTViWxUjo9O+MXlBSd+Rnd+zLy0jIPjxX/8R6xfXXPcNVzd3tKcnzNfHBGahpISKUVaF1ive6ahZ4wxcnZ2ivN1E3z+6hXvvvOCtgn02w2dJd79xjsUAq8//ZjzF+/w3nsv+dnP/j2pcGgwAJsgjHpwvPf3dKd3zrRMmj3RWk6VKdOn1MCkopPKSaqTi7nOZs0FUaFM5IuYUVDEV4lXWMxRLVVGEjzmheIz2aoQXHKh9CC7jPcjMoyYOsqYKGo4FdLQo1MLaiWZ6uQmgGL7QSnV+XrnCU1DE3rWmxXgUPX0VtfDO49KHTlYxRlTsBAgR3LJpGFyFNMhds6RraoxTAVvxvVnn+PVUd7p+ek0lOTZe48Zdj11MEk1ncgLbZoJWoCcKuZdUqFkQYuSU0alwTnH6eMnfPjJL3GDxxFY32xwZuRxxJlBKXRNQFG2mw3Lsycgge9//68ZxaFWB89kqXNwuyNF2ga8h+BpGw9a561i0N+u0Vh1lhbr6L23n7zi+fExabOlpCoZbBvPdjNM5GJmNp/x+qNfQz9QJoJHsh1mUNg08xVq55tqdY7O+wpXjXFae6nZq1XEWNUxm89rc8d+JqnAMPaErgERkhXGfqDx7iBRxCKr2+s6xMdVx02pigAdE/SOsHOUFlg5Vtrwt31L/84fc7XOSP4WP9s6Pt10X8l3fS0cqJrw0Ye/5lu/Pab50/dgZ7x58xkl1QytoJOU8b4caa/RuYezSEFcg6RSyRG0kkzOk1IdW7XdJV6/uawOWRRVjwuOZjGnORKaZkbwnjRrGGeZ3958xnYwdtuEHCu2yLSzymxiw
jZkZi9b4lj4ePWGD56/JDWefKusrVCajk3MjKnQOM/x8hhyZhgGrBi3tzucdxWqKLX/VyfxuVMlT22sR0cLQlZ8E/j5R59ypIAa81nHrGmqgoTaUlgB+VyzgAk/vI91co9osgnAl73oewoqOn0t7MtZq939MpF6OeVKGOSauYl6BM9qveHk3aeU7EnzUHV4rXD8aMmjs1OGzZbt9Yrhix71Aa8OCXWIcWRAU0bN6LoOMFIu9Vq4G8qylx7tM5wmBE7Ozri43jDHs9n0xJRoQs3XhzSJKKNw16574PqnoExlgLXON8h7bbAIiHG12xFcxV6/ePO64tmpcPH2qrLNkg7ZFeKqvhTBskGqk5iG3YBbznFFGdZVVbAdtoR5g60yT0+fs/p0pCSlv13TmSCloBgxJXap5/mTRyzPH5FRnDhC8ISuZRhGcnDktkHbps7yDJW8Kx5EciVUC3hRYooTiVim4dO1xbaW4omSKr495ljHFarDYYRS+PzTzxiiTYRoOqiVDs7zHtZWrBB8U4M40LYtuRTW6zWPzs8RUWaLOdvtjphT7Vhy9867VyRUSV/wDa3zxH6LE8Oc4tuGOCbE6b4Iqlh8mcr4MeP7QtoY0RvOhLHM+XQ+J4aG5c07/Hq34mLzNRom8vuaYRzPW1zv4PNrts8cs6CUpmUYM+Mw1s1tcscuMMlj9lnTnmHN40Gq46B2HU3gnqhgJbPb5alBSZCQkezYIUiq+EkZNvQxkE8c2xOhP06wKMwXO86XnsCGnCemUQKZY1Ke842X/xGf/+oVLxfnjDMhHXlyV3udTZW3V1c8efyIm6vLg6rggw++iVliGCOff37F9c2axcmSRxT6cSDmgkN598Vz8rAipszx4ohx3DFeXeHPzigx0xMJrp1YeDBcZSYNstU2TtvHHAGtIpmKkx4K24kAKYkvtSlzN8uRqfUuW2G+mLMbx0paSdVtxpJx844hrckLR56DHMH8/SNSo5yFc+aXgdM/ecQP/+pHjFEIcRqS5KoMRbHKiufMLISD40yxzjuoleKE75pjcbRgdrRgeXTM1c2m9mBbwfl2evR3splUbGqhvdt7iFSyyg5elRjHQ7C2abJSjJnNrufVq9f0fWXsLy/fMg7DHf9l1NfzAW120AS0n0EesUVHCYbLLfOlYxyNpV8ia/jx//0hj+enNINnfb1FNwMBXx1azAR1oI7Ndo1ZwbmW1faGoyen0HQM1yvGJtAtZ+TFgtIapVGKm5oBpErqJI4M25G08EgSdDvg1JE3t5y0jl/84u9Ynp4Shy2rqxvGWCfta04sj1q+9c2XXN+sQBK5GJamc8Q+iamDaMwKYhnvPW03qzMfkIPIfnlyTj9G1Dl2NzeYTA0rbVOd/kRwLo6e1alTgIjn2eNzToMnTPDd68/ecHF9Nc0b8AzjSJkkeDYMlQRbKx1t1V0nIfWeZEqefYMf/VC4eHPEyxcvv5Lv+lo4UIDQeUpKjDcr5i9eUqy2Q6Y0Och9xnlgjPcZCNw5gDt8dP89+3z1EJ2wKduqZIpA1Y5NgnlzQnY1ao+uEEPB2sRsFjmfbTn1iSO3RSxRTEkWuE2JTelxx4UhRHIj3PRrTs+XJIHHz56S1ztyLoBWKM9k2mj1/rp2Rs4X+OApuZasaRjJMVGsMKbE7OiIt599RtN2OAev33zON87OePbiOb1FyMZ2t2PYNxNMGlG4D3tU7Pbua9Pa2l1sEjlI3euiablztFbxvZyn9s5cwNfSWryrwvw2MGoh+0T0sHx8wkVe0ZSWYRCEiGfDd//yz/jb/+MHlUGdmG8VIadEjjWjy3AnubqP3R5YVTlg4+pkwo09Kq5OJNrf/0RYhCZw0LRRJTT7daiV8iSfmVi0g4JhKku9KtfXN6hT2rYSkc77O3x1koiJVv0w1FJVYsb6EVsL2QrjAJvSUwI1A0yFMUb6yxvCKLiiENPU3VMrBhFo2m5qEXa8ubpE25bSTO2gC49/NGftDOaO3E4Tp
Vxls51XQnL0VtiNVXvb+jqTqG0Cmcxqs8Jpw83tLY0PiEWOFh2b21tEWv72b37IkKgOD2G5XFacfuqeK6WQU2boe8acDhURpVT5XevxzrPZbuuvsVFBnUdDqKP0FMR7cIrzru4z5zCr+KvrPCLuIHF6+uIpT955BgbjGPn5L36Ba1oKRhKteyoV/C5y+KU5EkjOKE0kaiGsYLHYjxT6/exr4UAN2B473LHjZPkEOV5wcv6Y+MUFISi52MTGlwOjXPG5csCY9tSelTuJ0H1T1buRdbCX+01j3eQACRymWU/jzIo3XFOY+ZGzpufUbTgJA45ENsdoHu8chuJDZHG2wIpj+eiM/Gpks93x5osvWIrDqxDHyOXVNf1mQ8kF75RcEuoqXmYYs+WSMdbM2bcN692WxWzG5cXnqA9kKzgfam/5ZsvRyTG2vkW8MeuWlaQyUO8Ow5wBcsps+5FhGKbfTSMUa6bs7N7vwbGamR5IplynV5WpEyzFyGIxZ+gHzKapl64OJG7mM7ZlQOaesSnE1shzWNkGGbdsTHmyXCKuIcxmaCuUYfo9UgFsTFWEnu80rbVFsv6aEvalu4BIHYp7c7Pi6OxkmsC/o3JBwjiMOFc7mZxzU+lfDo63lIKfxs3JtHdE/F2RM8VlQbi+vq4s8kQSeV8z45RSHZNX7g3IlmkK0JSRmlUJmIig2zozVAbq0G8v+BDIQ2RePGXncUPCxly7uFJCrU7xb9qOtpshTQvOs7PabRYl0b3zCJaKf3fO9eqSsTXSzMhqFD+1V5bMf/YXf8bHP/+Y/5e694jVbF3zu35vXOlLO1WuUyfd7r63E9gyDEFiBELyzBIjQEiewBzPmHqKhITkAQJPCDMYMENCDJBpuRvs9g2nT6xctfOXVnoTg3d9u8rtdt/bV4TbSzradXao2nt/az3v8/yff9jLDj0oMIJx53l49oBq6BFlTxgi/X5H6AZOTo/QKVKbBZ998SUvXr4itsPdMzkOPSFGyrK8ezaVlgxDj57UfEpNir4MQE/pC4Yo8gEJ+eeTVqHqiqRU9mQVAqVyYZUh0cdIt++59vnAVSlwNl/Q2HxQVdbwB//qHwLw4sVrtn0HWuGcR8WIEQYvFD5EZtUx8Z0n+h7lE6uTv8ZbeCUFsU/EAs6v3/NwtWB5fMLV1TXOuQNKlU/2lE1nhRBo/eHb995PGtpsWHC4pJJ3xTYGd/dwaKszh1GqbCir5B0JN8qJjwYf+Yvmj0tp8WEgHpQ9wuQ8piRIPhBGByJQlJYujIzOIUQ2TK7LkjdvXiOFoKnrvKmdNubrTYcQhqPjFcYo3r15jUqBvtuD98zqkhcvXk05WrnrWlSGN89f8OQnP+HV8+coI5FSYbTGFhaRAkZ92MybIm/346zgoPpwLrMB9vs9KWYM1VhLURb0fY93nqQUPjjqytD3LfMm5+OEwFR2012xHZ3DBI8LHlFYZJHYDXtMDUpnXHd2tEKGmrjN5sXSaoT0COHuivg0CeeDLRsJ5NdTigle+DBtbDY7HiMoinKaLhJCJMZxzM3mZJ596DAPV66Fh6KXl2aHBQZCoJTEGMPt7YYEWFuAlKxWK5SavGFFLsCZ5Mrd35XgzqVJTv61OkHyiTgkxnbEkNBaAB6LoJSBFBQyREYXUYj8Y6eIVIL5csGjp08QaqLaVRWp1IRaEUtFaDybYsAbSWxANgltIiiPVZqQJNt6y63d4o4ksc2/WiMN18OWf+WLL7j+4z/De0/d1NkxSmTc248jl5fXXN9u88Em8iHlg0crzdD3pJTdtYwxd5NVVRTZnzYd5LsSIRNSa6LSSGsJUhILjdfZCtE0JdFmw+0UwcWEdBHpBUR5x2V23lPNasSdZDS/yMFHHj99zHa/5/ziEhFDdiYbEjJKxCDR9Qg2ixJUcBSbj16/v8L1G1FAfYoEWzOoSFkrQhhYnJxgX7/EhJhPIBTBO0IIuHHKqxnHu7EJuMP5Pr4OXZ1AUBTF9FBlW626r
tFFgdSGZA3RaEalSVHivUAGkCO4XtHpgp0XCGnZhPxwkQQey86VbHrD0VXL+XdvefjpCVZo3l3d8PniIVtraTTMZw1+v8MPA9F7lBS4cSTEiNaGECVNU8MwctrMGMuWz588JQBXV5fMZnOIESXkRAAf0UgiAmssLnpC8AzDiB4Goo/TeJkVXFJKxnEgxrycCyGAzAeTVhm7UlJm7bVMzOoSW1gSkuAH9vsbqiorXZxocE5yeXULZLs8RTZzjiFiCwvWgs2b2nHo0THRDw71oCB1IhuvTHCBUoqiLCiLmuuL87u8ICXVB/RmMr4WUmYMdFq29cPAD9/+gCkaYjw4uStimF5/lSa8OqGnMfNgVXgHCk8FOXNn81QyejclecYs453oZV3fE4f876dpCkoTX/lQ+RN59BRKIZxDeYcYFHIsUYWlVpL9MCKNJAbPrKlI6y1xiOzWa0opiNETRpeXilqwOD7KC5Wi5NXNORw10FjiXBAbSWc2+EWPXEhm1chR3VLrnpnJYYxDhNbdUhwNdOUx4yZRFJaGOfq252p7zShzhIjRikJrCq24v1pwfX7O+fklQllU8hlXTxE9de4phJyuGfKmXCl5xzEex+HDc6g0key+L0TGZ1NhUMsGUSp8rfG1JVrNiEDGmA1UBonuI2bMDVWMksVshq4MCP0BjkppWkTCUWFoZjU/fPd9nmD7gBEWIzXl3uHbkdi1zJoas/1rvESKSGSp8gJBScKwR82OqeoZNzc3hKlQphizemHqPmMKOZ/IHGgqecQ7xAgfMMYMl32g98SUT3fvAzENJOUgGAiWZCXSCmzKKo2YJKHvGUXi0tZsjaG0VbYxI0OAPlXsu8hXP31P2IDYC9QoOa7m7G7XVLagFoKr8/eolI1IophoLlXJzdUNc514+sVnvL0dODte0t+8x2gHwaMkqDCynFWTdl8gvWcUYJRgf/6ex59/yqtvv81jfcoiAqlATqh9qQ1aK6w1kyt9QEiBc37qyibqTggE59lPAH/bdYyDmyhOeeTvegdinZ3YtSJKhbFZPvfo/hFb23A6L7kWI46sflKiwI2B5ah4/9X3PHn0OyiXMOTtLjGbhQxxzAulackX4oFlwYTSZFmvkpbgc6qiEAoXADciQqQSBhMjoigJB6x0qm0Hq7bDYuluCTmN9OmAjUsFUeLcCEqhjEIaOU0qWQV2oNSlOOHA6dDpfkDgD4YeoR9ISiKcR/YZ4zNJoJPFO0dd1iTn2F6tM6Sc+GBMIiRCQz2vSLqEUrENI2lZERqFWEbCTCFmJanuWK0Sx3XLQ33OSRmY6QERBo4ff8k/fb7F7zo8kq0r2DvQ3Y5F03DbRux8hpADepet6ayW2Kbi3iefcf3Vt4gUSMnmri5EEpE45R8BkzTZ5eeyLIgpEdxI3TSYomC3b9HWUM4bMAWxtKTSEuYFroA40zgdcapDSIGWBunzvRyFAFMilUfHxKOnj6f7WB5e3MwSmdRtwgdKrbFVhUqJ7bDHas3ZvTNudjtE8igXePbsi2zC/GtcvxEFNCmJXzUkP7KaN/lEKzRPv/ycmz/5vxCGHEqWpsygj8A6MeGKCAhh6ljyh1BGYUyO8NBaE2Nku91wtDhCSJHHVsgOZCEgQ4AxIHVAbh0p5LiRJlhctHQ+Zj7nfMXN1Xl+gH3kwckj2qs3xE3EdJKylZjOs31zzYkt0cnR7Xr6bcfYdqQYJg9MQQojhVY8vn/Kd99+QysX1MePGQdBWS+4ub7g6PQBzXzB7eW77HqfQBhLicYgWN9cMT89ZraoJypJTjGdGqlMN4l5MVKKHL/spjRCSYQJlI8hok3mQQoh7nJ8dKPvFnZx6hZTTPgk2Y+ecrnkbFbx8LMz4vw+//t3/5SwrwnLhB9T9tWsJMFHooex9ZzvnrMKM0I7kjqHGDxxGPHDeOdaf+fVOYGRcoIjQsgHQFFYREoMQ49c79GFwZYVSQgcAaMOWOdHS6cDHSrGqVkUH5ZKQn5YA
AlBciOOhDUFRWnph55ZVVEae+cfGqfuK8Y4hQhmWlD0H6SyeRkFMgpCiFk5N50SfZt9ZIfNNhvhDAOiMHfmzMZaSBFTFURdQlHQ68QtI71IeKUICkaTkNWWJ0eOlfueh8OWT+WGe37kyXLGftjz7PSU869+ztGjx5TXPX+mNGsf6IeeVC8YWw9aoiQUVcGynqG9593bd9w7e4RSOdsoHzzqbvK7WxTB3egO4m6pdAjOc85TVFUuhNpmwUqpibVhbASx1vhGkCqJj57r20tUUqggWOgZdVMRPVirmVcNfl5kv1o5wSUxQynZGSohnYcQefLl57x685JZpXj05CnVrOJmu8ePI4+e3gccX/2Tf/Zr1a7fmAJ67h3HzYyurLl9844fPTC4YURYi4hjVpxEJoPWMOFa4sMmPk2LooOFHenuQWz37d1CQErJen3L2dkZTV2z2e8oqxJpLVHm6OTkAqF12UCWbHBAH0k2cVwvuP153pYvmwUhOLrnW6qdxu8CZpDoNqE7z6PVEX7bMp8tubhcU5cltVUTkTs/WEPrmNUNm/2euq5Y37ScX17TXW95crbkdrPj6JQ7HFOkiBSZEH2Id5dS0K9vOb3/AJBcnp/nkXmC95wP5O77A1neZHv4bMv3EeyRi4rKWvgpfiGSHwDvHCHFqQB7vM9O5fPlEVYG2q6nagRHzYI2JsKYDY69gHHI5iTepzz2EZDKo8aADukASObO886G8GC6AnehfNMlBAz9RG9DURjD6viE7168mewHA6N0d2O+VprDyfvBj5W/uHjKzC8c+g5VltiqYgw+S1iNmbqdiIwJeSig3nMwQEGIbCxyx4Xk7nePVIQYc2crYdbMsGriqw5j/vqUsKVldA5lDFJDNZshixrKkm/efMNgYSjBl4FxIZGzjvuLgU9XkX/jySNOmw3vf/GcTxclT1eGi10F20tWqmPPjqWeUemRwVToUtH2DjEMKBeppmdncXTEDz/7OVpnxdD19TVCprtww4wXS4w1SJnxYiEyE+JgECJlhjNCCGhrYVoqxRTzHkFJkpWkUuIrcI0gVIHLm2vEAqSPNLbkZLakvXTs1x2qqjh5doafFnRMoY4ikQtoL9A+wSAQPuOyqioxRuFU4s2rF0TRoSrBfmjZvHrO0cni16pdv7SACiH+K+DfBc5TSr83ve8Y+O+BT4EfgL+TUroR+Y75z4F/B2iB/yCl9EtdolShGE4t5ylyvV8zXy1JjeHizZqo8oInxjQpDz4sLP78dcgwOtjdWVvcYS8H9QnkRM13795jC4vRhrHrshxNZe9BQcIAMiZ0suz7nllt+fS3vuSoPCE2T6eHIxDUyPPn31KFmmG9ocKw+/oVZ8sVZfJc7/bEVNB3I2Lc07c9UgqMzl3GvIR7z57wz/70Z8QYWdUFbv09Tx4+5OLigloGIHJ5fk5lp1EFiZKCsR9o+4EUI1fnF4ibK45O7uXOSx06BbhbW6eEnjDKOyPliU8npy2pMQbv82gWQiSIrBJJMSILgxTiLtZue7FljIlmvmCxLLnZ9MS25UcPf4vL2xfERcmoI3vhGYhcXNwQk6IdOuTYMpt5fufBY55//R3BjSTns91fnOIxDpgiTIVy6kSnYvuxukpIwcXFe7ScTCtE7vqICR8cQfqJDnRw/vmQmSPkBP8oNYWQKbx3iKqkXC4y6Vxa4jjiiwLXdRBCVgj5gPABScperyKLaoNMdwsqPWGiCkhSUUiNtII+Csa+o+32LJZH1E1NkgJBzK71gDAF1Vzx5MsvSWVDqCyuUoiiRB4rxDxSnkRWtucTu2YVeszYY83Iv/Y3/ybbN99yu99x9OAZ/8effo20S0av0arEJMFZM2O3c+wHRxEVMgaYguA26zWL5Yp2t6ZuapqmJsTsCHXnoyoOUFnMo3uKBJmjd+KBTJhyOme3H/ExkZQiWkMqC6QVUETagZxXn3965meW6AcUit16w1l9AjNJU8w4Waz46vw7xugyLKJzn
PHJ8oiT1TFx66ixqE4ixyznXD55SDu2vNvfMBYRtRCc3HvM7OgRIgref/PDLytTf+H1q3Sg/zXwXwD/8KP3/T3gf0kp/X0hxN+b/v8/Bf5t4EfTf/868F9Ob//SK8mEfdhkeyqlCaoiKM/F9oeNqGMAACAASURBVJJkLWkM2YU9MB0zuXEI6UM3kcine4oJqcA7j1YglGDY5+x4REIQMabAOcfQDcg6k4xnTUmOsBQMfiAOKfsoSoWJHu0144t3lNayHQVVPWfwPUN3w6ezI65envPp4iEXby9ZLUpE29HIAn1UEsOStH+LcQtYzUkxoE3Bd9/9wLxsYBxotKHtRjw9pTWcVpJiVZNCYOj2PLh/wubmAhAMLpBIxCSxZYU1Fm00Es/QbQ9wEHKyo0mHzbPImKL46HeIMNPflcf+GGKOmpCZDC5ELgRS67zrnLT5Gs1IR31yxOhHophjKs27yw2f1BXhZjdtjxNnx0u248j1JqISiDFS6YIiKN6/fcezh494//w5Pjg8gBK4gZx55XNOlPN+ahQlWmWHqGEYM4tCSsaYiCm7+OQDQ023Spr8sA8/8+QpKzJ1Jk2uXVIrklKYwoC2UzopVEdL3LDLnrCdwFkNsoRhhJBpVEJKwjhkrmMIWQU3KXLuPAakIAmNEJGq0vzk9/8G7y9v+OaH56A1+3GkXi1zKqvISbNCCZIOzI5XrLuO2fKMaCO+MIgZxDJCIyjTmpkeWVpPI1qu1y2r1Zy1WvB6r6hLy/k313y/KxnljDEtWDuLxFAiCMWcUhvcbKTcS6QTOOEZxo7T1RGxb3PGrLVYp3ByMvLhMLKHyW91UruFOBnm5AJ6YEKklJVIyWhEWSIKSywso5J5mWZyLUBEZrUh+UBZNjz+wz/kxc9e0vYdYgQfBvZ+T5BZoTWM2fDmzf4t4hWcmWNmquLze0+QBK4v33P64D7hpsdFSec8n/zkt7HNHBcVYoy05mOf3l/9+qUFNKX0vwkhPv1z7/7bwL85/fm/Af5XcgH928A/TPmu+UdCiJUQ4mFK6e1f9m9ELeBegagEYpcNFd7ub/jRH/wev/hHP0UXmpAynSZNHRikbKU1bUGFyJvoZHU+/YxmMWs4OVnw/t0VwzjiQ8D7KfFy4qghwFqBH3siCuEEiIQfHGhDCgmZPHWwfP7lE8RVh95t2bhrFos5K7NCpsDR0jBev4fLt2yHltNHj4l+w+X5QDnfc7Uu2N++pRAeoxVV3fDoyTMaG7l4/4ajo4rlck4g32z7tsucOmO4fPeGh/fPWNQV+7alMoooNGGSvgU3EPyAG/1dZzWMLhutKElRFCilphjm3MUmIRjGkV23y18j82JOWZNzvMsCWyh0WRGUykuSSeueUmS9GwlNxcNPHrGqLFcXb3h/fkPQBc8eniKiRm5ahEioqGivbim2Y5bZ+UjvdriTxP2mZP3uNfeXM+Si4uryAikkxVFWEVljiUJwfX2TPWBdTudMKYHK4zAhEkQeg21hETIvw5RWhCnMbELlgMPBkf8sJ96vMBa0grIgao0sDX0c8SclvnWZl9hFeqmwTjBXc9h19JsIvct8WudyZxazzSLTAlNKgbGWp8+ecf7iOY8en3F5dcH3L96gqxIHyKqgl9lwGSlRViNSYLGaUd07opifEgvFetjRx5YhwjAJDmo5EEKLR7ALkmV1xk1ccd7f4zu3RwXL1+d7Nu4ht52htEfs2grfSs4ePWP77g1yiKw2efEiQ2BhS+K+g3k+MMOYJcay0NgDZzqlO+aLQNzhtvBhWjgMEFJOzl1a5+jjoiCVlj5BU9fICvoioGqBaASRHaPLto3EkV27IymLLmDj96RKgBF03chykalSMmTJrI75gHx+9RLloF5Yrt2Wtey4VR32tOGNv0Jsb+l2AyYqFqfVLyuFf+H162Kg9z8qiu+A+9OfHwMvP/q8V9P7/oUCKoT4u8DfBbCrJVv2FIuSFD2eiBOS89tryqaiNBpV17TrN
X3X4Uc/4WMHmlIeV/shr9IOOUiXV7f5tGo7lNZYa6hKjVSai8tLlNQYo6lnubDEmDeswxDo+54wjBRJUChJGQKqSwQiq2AZtzv8CLGeoxTZNacbOGsqwjCQug6EZHtziYuOciZYrh4Q1lu6vmN9e81NSBRS8uOffIFQ8M2f/cDoI0Vhqasqc1G1IUXBV199w2pZ8/DJY4LzbDZbjFbE4CjLEm10ji+OuSuakbFMMaW7DeNI33X0/ZDNODi4s08cVyRjgDQGpBYwBEhjdo+XEGx+K7XAKMVOJMSiJpY666VFoC4MrQ9sdi33HzzhzQ8/BQkRxe7tFVqY7K6TQMXIxcvXlGdHUxxwwg2O1eKIlCLbzRqlFNZqknc8fHDC5eUlZVFSTNrqsiwJKXF1dUV7ULZM/E1rSrqhI02RMHfmydMBEyfPWCYqlZyczIPWUBhckW37hkZQnxyDEuzPzxERjFeMwYMYOTo9wm32hMGReomY7NTiRzh3ApRIDGMuCIWxRCkJE+6atGJUghB7zKzMER8SlDJUDxaoeUPSBqdgvd3n5YtMCK1RSqBRSFHQeoFWK749d4z1ij4uuZn/Hm9fXvFyu2CzjxydPOJmPyI2UA4Su9OoFzdsWpUZJNsW1Q08ePyUbtquS2vpu4HgPV5mo2o54eJw6DOZdhKTeuvP0Qlj+ufDG5WURKWoioLtdotqakpRIYHdtkUVEiMqHj94gvCCuqiBAjVBA0ElWt/mTnee8Cm7V33+ox/z/B//nG038NmjL7h5d0MQsB13bP0esbLsRIdMA9FDGwZ0UDj+fzITSSklcZgJ/2pf9w+AfwBQP32YzELhh4AuJbtdx/dXW5beYOWh6wmUhcHKxKh6uq4lSvWBsQyTGXDGmQ4v5L71SF2gC4uUEj+O+KFDaYMxahpzs9fk2AcGN2SVwjiSkiDQgYJquSCsN8Tg2G1atttz7n3+I0TbZbzNe0pTgtaYapGVfy5i2UIvCe6cKBqS9JQzRVXPUUmiKbm+3hKiY7E6njaKWdWSR+9IUZSU9x8Agaur9R2epmTmth6KpCCASJm6hEB8ZPRQGkmhCmZNmeOVJwxrcAHnw+QEn5MMkzbIusZrhS8tqZTEShFsxqdiiDhraEzD69011W5H4T2VNWzbLVcXl8iTM5JXWAn+do/qA96PqEmSGKb8nN16z7yueP/ukr5rUSJ3K0VRYIqCi6tbZlWBc1uOVieZyzrp1C+vr0BKlkcrVqsVB838ZrOBFDhe5MVAURRcXV3lglRXqImeFRP0LhAEmNogrKXVCTHT9GrE1YptMRB0j6k05dMGvx+RqaBKhuXxgqVqePHTb0i9x/sBGQIKQaFlJpVrM6mhBC9++J6mKHh1ecnFtkeUBakqcjJno5Fzy96CMJqYPIWWcFoSipKULCiBqgxpgOPjY67FBhclsbMMQnGJ43I7UlU17142YCsuLhPvXyhsccpwueP6xZ6mNNSDhdbz4k/+CT9uTnixu8JJSyUNqMC7d6/58pNPET6xOjri+2+eQ8jY6Og+iBEO9KXEpA48mFVP9K00cZCnth9pdIaG+hJVl+BnmHlJieXB2TP+6Js/JdiIrDPb4+d//ANnx2f89qMv6Tc9L1+/QRiJVILNbo1sJPNCEok8eXif9999RbKSz599yYufv0EGgbDgbKSYzRjUyPK4oUstvRtQVuM7z2z5/9IS6V9yvT+M5kKIh8D59P7XwNOPPu/J9L6/9BICRtdRmDmOQO8dKUI/5DAy6UaMy7G1RktMaVnU9o7wDGmiqki8dzifSb3e5yhdHxzejfmFFToD3jESBofzAlMsadsBrQsq29B3LWF0LJZH7NoOZTVvnr+iW6/54tNHzOqClFZsLy6Yr1ZAjhAJ/ZqisiBnWZUUrnn8+DO+/uErrJkTfGTd7aibhqZuMGj8rmW92VOUFYMfSNoxLxcgJ9w2xlx0JieQQxgd0wguD4F3U9dzwKU+pv8c/D+VyrigUdO4J
cAUejJwyK43oigQVjNYQ0ukLSVjpYi1IRYSN9mhdZsBo+HRg6fc/vxrWucJw5idd4TEjSPW1Jy/fkEKjhhy3lA+8SJPHj7h1YtXLMoS3/ecrpb4piKl/NrFmBj7HiMVPkDXtVxc3hJixJb1hxuHxGZ3PVkbZlOQ7D0Z2WxvM3lbQFWXCKG43ewmPDxLKB8+OEKXBZthgKqgWTbEecnN9hyzsjDXRO3odSajS6XY7UYYRs4WJ7g20rkxu3oVFSJGrJBond3cpZT4kPXhCvDATdsRlCEZTSwModTY0wZqySBGhE4obSgXM8Jc46VCRMnl5XtevnlOMpGGY4I0jEkQnKXtE4MzKFGRxIzF8Y/4+R/9DLdP2FSjtWSxttjeUxSSsO2IfaIaNFf7a74oVnz9/hKQmV3Qj+zfnqOFxR43bPf7HMOisyfCoZPMUTIf6GFSiskJXmaP1ZQdnA7JtmJa1Bmjc/H1gdCN9Nctr3/2PcYFYu8wSSKFx6SK21cbjtM1b1+8pnMJpyOpzN179IkhBY7PjuiC597Tx8TOsrnusvG50uz7HaIsEEZgagOGbOStMxul7x3R/H9LpP+fgH8f+PvT2//xo/f/J0KI/468PFr/MvwT8la11iXCSaLzLMqGsnCYMaJUNq4ILpC6gegcaiLUF0ajdTYdVhORWVqN1dkHk6nLKgpLPwx38akpwWbf0vtAN4xcXK85Xi0Y+j2dA4FBCMlmvcmBV70nJEdqBd35OUJL5mdPIEX85TmyKJBFiVTHeSz2ERiQSSBUojIlQgVAspgv6bo9drni22+/597piupoTnKRcWyzZ6MVdPstjWmyUbL6YOUWD/QYkWWbf97n8+BCT8pd5sGxSgjB0A/Zqu/QNRzoOyoXmdxFJJLSyNKibGRxf8U3txfYVcEu9ZlmFBKPf/u3KJyif33OyeMlxbhCrDtE1/PuzStEesbZ0TGXr15ghSLGQDuOd7Szt69e8uT+KS9fvOSzT5+xH/ZcX17dZUgpJamqEluVKCmYL2Z3Hgi36xukVISQGMechnlQvqQ0dbdwh3Nm415HSi4ftxNmLsbAfnDIssDMGqROk4XiSHVS88Xf+h3+9NVPqRYFW9fSDR6lBUVpwBo2Y48ZAg8eP2B3tSXuOsK+pXctIqUcZoe4U1rtx4hKIxib+Y8WQqGJM0VYCnoTcIUEA2WhYKbo0p6b9Q33j5/hXcunD+8RJCSfkLIgipJUHfPu4j2DF9T1HC3vs/+zgb/x+G/x6tsXfHb/KWHT8t3XP0N7CK7HehA+oYaIc4HCdvyO1Xy92VC5yJEtmS1mfPOLn3MvPODsZPGR/eGBHZFhoBjC5J3wUUeaJmOWiUvsfZgUZAmSILksWmDMVLR409F2I/dPl+zaNcFFkiqJ/YjWlsvNW4oE0lpccGy3I/WsQKeAHDRHq08QSfP2m68pnOLh6WcMb3bo6Em6RAuJlxFVWlq3J5GyACcJvNWZ4fNrXL8Kjem/JS+MToUQr4D/jFw4/wchxH8EPAf+zvTp/zOZwvQNmcb0H/4q34TVlt96+hPSKHm/e4MYHFLskMkjk5gMdCdailT4kNP6Rn+QD2S8yU6LEqMVSuXT0FpL10/GBh9RdpaLGf31FV988ZTzy0vads+zZ494+fItfnAoma25tNKM44hKTJZcgf31GtF3aGt5d3HDJz/+A3yQ9Ls1ddMghAFhSUIx9C2Flbx8fc6DRw9o9y3L1ZLtdktT1wwhMZsZqoWmdS39IOhkx2GHmaOPJwu26UfNdm18GN3FwTD6QHPKlyRvg6XMiwA74aR3f1d+gae3eRudlAIZkZWBKvH1+Rv6maYXA/JIk3AQImoV8esROYtElx2aykKRHNy/d8SmveUqjZydnXH7+hWVEHlcBhyR5AW3V7c8uH/G9dUln/3oM4Z2PxWwiPeBdt+z23aZ86oUSuclYvabPqiQ8jhHilRlPfmI5t9bIGW2xTBkmeHHkl8O4Xr53hq9J42Ov
heoKnLy7Jg+jISyZO9ajBY0pqTfj/T7gZUp8ENApsDYtyQi3o8ZP/agkqKaW4pColTB1fUGKT1SSc7undJrcIVkXUIsE0OR8KUk1AFVZb5iV3j2ROrThmgltizo1rcoaZCjwpSabjOy9gOFqKmUYvt+x+5mz3HdUHWJR35BdRURwVK3EtU5vPOIlA8bGSJi9GglUFLzyaMztIA0jmzSjiefPcn3l04MbYfUMjMW0kekeaMARSQ7Ijnn8JPQQIqc1aSVQulsDiK1RhmT77VJ8nuxblGj4tnTz3hxu6NrE7thxBLQ2nN2tmK9XZNMYAwDi3mFd2QJ8jrw9o9+Rj1bwcZzenof3QnGm56u62iWK7SMEAQqSKSH6LJQ4uxsxVGxINz8OdD2V7x+lS38v/cv+dC/9Rd8bgL+47/ydxEEYgsmGT6/9wmvr77Hd4HU5QyYFPwHNV9mek8UwYOGOb/tvci8POEnYwNFSluqsqCYRt3gRpTUCAWQuL65RIiALSRtt8cWkr7rESiUVJO9WaQsK1L0xBBouw43dNx/8pSTBw/Zth2qqBnHkWbW3PEuhagY3Z6+62iqGq2z08/QD7lTFGCVZtY04EeePXnCT3/xHXJZUdmay4s182JGabJ+OB8T+aceRpe7LiB3ApJImKJ7J6rOlEIfY8pLJh8mDiUHntNH3WzOrQExpZFKgkp4BUElRgZEgrN7K6ySzFcF19e3FCaBkWBiXjJJiZaJ26tzHjz9nMXyiMsXPyCToKlq9v2e4D1RCLa7Hffvn0DKYgdrTHZO56AOmojoApRU9H2PlJLeuWx4Uc8Yhv6OjuVDYN+2+ZCQEj8MOOcRE1astKIoiulAyXiu1CrH4NYlzhjsYsXZZw8R90tuxh3RR0QxRduF3IEV1pJ8QCZJXRVs4wapDTFl6aFPI0EJTh6c8frNc+ZzQzQCUwg+/eIZN9s15WJBNW/wJrKvI3s5Eo2GIqFKskADgQyG43snpL1hdXqf5+fXxKHPfplakpTGEgkpMQ57zuoZ5TrwcLFCbQOlaBA7nwUY+xHlwjTFTEbZIsevhBTQZUG0kZDAYrNhtkmkyZ3eTqoo5xxKqZzXJD9MR84HSqsprSbEnDd24NyS8tsQIykFXD+ii5IQHGkcUaOnkIY3P/sFRmW/2VmytMOIkI7bfUs9X+DbkVILgvAINKVQ1PWczdUVF2/eURhLKhJJeswAaoQyCOIItjAMu8DoMk4dCDg98PDePX72T77+K5ct+E1RIg2BeqMRIaH6xOdHj7m8CLThChOzOW5IEZ8m7e3UP00N2HSJKZ1yEpYkgR8DEYn3Ixu6jCEaiRv3ee+sc+icEoZu6Hn96j1FVVHPK6IXzJolu+0OosCFwNxooou0QbDvYfN+izCWhw+PcH3P8mjJ7e01q+W9qRMsmc9Oubq4wA2Odt+y3+1ZLhfs9zuOj48IMXB1e0v0DikV956c4buBoCJVNWOzHdmm7o487n3WjJeFnfh1uTuTMitChBAYrfO4NCl5hBDcrLc0TZN/Z5MqiYlEnshu9ikE+q5lPyocHr/UlMsGpwdkIelDx/XtFZ9/8gRZCASeKANe5HjcmLLBCjHzAOd1w//5J/8YOQyUhclRwwIWdc2u73EC3r0/597pKd5Hzk5P2VxdTjdFvNvkCiGxhaasFmitcc5hjOHq6pp6OafrunzIFZYYI2U5AyDGClJWwcQUCT7btMUY0cYglSIkO3maWprlirNPPmUoLGNU1CZyvywRqxWXm1u6rqMMFu0TJQqbEkYIrIRt6KjnJduraxarmuvLa16+fsfNzZ57Dz/herchJM/7yys2w4gtBKtlCbVELzWzVUErHeiAmhCoYRg4WWa1mwwamyxzU6OTIHnHOARut3sKY+nHkaOyQvUBFfdEvaEwljC63DCIhPKJypbIMNKlmGOwVZ7wQlEgCsPFsMOGROkFBCiEZOg6hI+Q8kiuhMpx1lMUDDHdNTaTXiOr5KYtfsY/8
0Skp1C5ShUIo0ghse12KJ8yv9d7knD8/u//Lr/4xXek3YhSgtor2u0VKEW9KHE+c24LB0YKZsOS/npDUQlOHizwu0DVCVwbkKqnJ3H+/D1DARSJ1VlJjAaiZr3fcmKPfq3a9RtRQLU02L1EhIgaEmqApycPeHF5i3cjKobs3sLBgDj3XDEd1iPTJabMm8xvYhpqSQi8z/k2xEBZ6bzFW+9y/KvwpLFDC0vfSubzknpeE2PM0Rs+MjKQqhlSRaTRiCQoyobNdsfoQg7y2rXTIHPAHxMilsTguXe2ZAiOqp6xXe9IRIbR0+23ma8YfdbsE0AKtjctYYjkSBKANHn6RmwxFUprsdYiAB8CLnjGccS7LN08FFA5Zfw4v6Moa4xROWJB5t+NOFi4SUVTl1RVibo3Z1dKOnpS3NM5wbIuuXeyoHWOIxS31xvUJlFtLEWXUDctadsR9zkjqFAQ3ADAth2RMiKFyhZqRiOGLLNruy11VVDNFmzXt1mNJLIbUZavws31NXVdE7ynWSxo25aT0xO898xmDSEG9vuWuqrp2h5rLEaoHK43wRvKJpraTt187rxSzNEQWOg3V7z71lHcX2HEkj7tMVbibzvSdmRcD1hd0IgGPUjY7piVJVEV7F3Pet8yX8zwfaBsNOubWxKSr37xLVKDlDp3aVWJ95J+CIx6pFjNoSq5vF1z2syIh8MoOk6OF2iXMAikl1xf3iA2OQVTRsHZ6oh231OrgrQP5CQwge48MipCiAidSArU6NmlbHItjMRrgVCaQOK6hOPJpIUoCGMg9ZHQZw/ZMIy5Ew05KiZTzGyOFFH5jk8pf08ZKZlMuaXEe5+VXnCHiaaYSG5AoHO6ZwSMxItsHPL+z77mx0/v8/23z2m7kRRLrNKM3R5lNM1qyXazxXrDfFZw/XqDHXcUcY7ZBG4vr+BmSzEOeGHYbPYoIyiiIgaJWyuktlxe7FCpxfS/Zu369b7s/9krjp7hxRXLep4NAFxA9I7PHjzmxWaDrguS94RCT6ddvKNJiI9GiINp7ujGO2dxFzJm6INkGEeqqsQHR9OUaCVp2x5bGpbzM96c33J67wg/ukypURrv/GSgkBgGBxQYazApQwJlVaGMRmnF2HlevnjL7/3eWVY1TQP3bN6A1AxjIATHcrEiuJF2u4Gkc+41Iru5Ewl9pCkqypnJAVlC3XVjaXJQl1P08MH4WGnyaJYiImXIw6eMa9mqyrQgbfKSKGWM0XlHN0wxGULk9MqqzBK7sYWjGZ0cMHOF6ROzowa3CTz70WfITnHcLClEohCJUgnkAEJqbgdH23V88/XXExVycsRSGq0Mznu86zGm5HazYT6rWPgIKNbbTFGSSpLigbKUUym7cY82mvbiEiUl+307YbuGmBJlUdD3A82sxg0jYRof45SWmaYxUgqZDw8fMGUxHc6CZVWhypo4JMTWIYOnqATKKNRNYj4WCBeJfk8/BB4VNZqE1pHdxZpuHKiPZqw3l8yahrIs6PaO4AMxwbPPH/H6/IIH959ijxe8vL1BlTXSCSwltajwbQIFhTTIqOnftxhVknaeq+83FL1HjJkC1hQ1w8UNMmS2mlaaEAPz5ZJS5kRRLUWOYbHgC4WrwBuIVmZXdpMntyt66rMjKqFQXiBvesK2pRvWJJWVaPlgyx2mDwG33+fnN2TBRowBpSSzuiHGnrZtp2KaX09jDIWxaC0w1jC4ge36Jk8UZQVDVtgFKXl3e8P719fIFKiqesKnt3zy9CkhQdx1zFJCDj3abFkNjnm94tGjz+neranGyGbbUwh49/w8m2FbRRwj0UjifocXYBPgE2L4F03Yf5XrN6KAJhdItx0yFQjnED6n6iXnWc2XXL59ydgPuGFyEJoiCvSEsUiZN9JKKUY/Tthn3vxqnTHBi6sNZ2dn7IZsEuyGkRQdx0czhsFxdbMmoVhvNhTGkGKia7eTx0W+a0bniCnjcaTI7fqWeraAQ/FWlsFDP46URcFhVRNjp
Bt7XApIAXVV8Pb6kuViQYqKkCK365a6qkhEdCGZHzX4fiQN0zJoupSWEwXpELCX2Qc5AbGkqQqYFiQ+5vG17wfazW2OUY4+f5zsLyC1pqiqHFpXluimRtQVazzr7QDGs1jNmc9qNhdXNGdzdCsRQ6A73yGcxg6asA/omPK2VcB6veF4lbvN/EuMOaERSVEWPHx0j3un9/n6++9QAmbzFUwRxMPQ3xVdqRRudBk/jgk/Bsaxz6opqbCFZdd3FLbken2TH9jbLSH4rGIhR0JXZYXRFhE8ZVmw320oipL1zS1j8BzfO8P3ELYKRpPPrDRio8nxy1cjhTLoqLBeYkdJPwywrCiKGUczQ3/R8/7NFV/+1iPevnpP1w6kKFFSUzc1l1c3SKHYbfck75ACuk1Pmzzb9Q3FWYlSGhcG4iSPnB2dUAjN9ZvvqINlUTQUIoJP9Lue9fkli2aO0QYRJmwyRvq2p5zXoATJCKKBsRL0ZSIVElcJvIGksymNQDKmlisEOgpsCkgi0eS02hgipihgWkLKw84lJXK+oENNjme7dsp5FwrvMzdZEXPCybQEDv3A6D3DOGJMgZJigggC0Qu8B0SeEEwQWCN5dO+MoeuwaopSkYm+7QjXt4h2j9ZLrn94zasXb5g1M2Q3EJWgSordvkPHkugTyWQj7zR9//icTvrrXL8ZBTRE3NWaJAwyZVNWwnQD1TPeOU9AZ0VMCJnCoSxpWjLIKJFC4TqHEILzq+vJHcaSfE9VN4QAb99d5Az4FOlw3Lt3zHp9y3bTs1zO2N/ukF7ROk9hLGM/ZE9Q0kTg9mz2e6r5MTe7LfV8zvJohVSKsR/YbNYIaWj7Dq1kHlMF1FXD9uqGXdeymM9x3lOUlqPVEUPf0ncjq/kcqy0+OVxy9CHH+r5/u74roGcnJ3kkAw7ro3yJaVT/KLqEhBERa/JWeF5lXDDxIXZCMHW+MRKSZ2i3dEPHcA1DUTJqiWkU8bIn7B1lCU+f3mf4+oo4jjzUK+SYUF3Adz3rqxvCrmV3u+H0aMn66nrSOOTOTwkxHWoV8/kCYwt+/OPfzUwBslvS0K2RUlFWZeZq92K+YQAAIABJREFUKs04ThZ8wZNiwtpyMoNW9EOOyd21OR5apNztSGPxEyVqsxu53fQ5itl7pIS6LDBjpCgKiNn5XPhIbPt8gAOFVSTvc2rrVYZFyqJgvN7je0drNaK6R2Fn1OULjFaIoLi9HHBjpCpqrC3Zbre0bUtCIo1hf9shBk8yCiUkUghWxsJW5xSEUVJKA0lSugV2SMidhn4g9QNj27G53hK6wMpW6BjBjbgQc2fXac6aKvNMS4WvJI4ev9QMdUAUgnYuiHNDEiOP798jnm8Zk2TwAt+BSQXaWMIQcnG0Ji//os80vXTwFADEIUOKO+1/CtnFS2oDE9zWD462He6aHkOgqSoCTPesn4ypJy6v1Hz+o9+FGPjTP/ljys0GawvmZc12u8HOa4SQbPtzfvsnP6HvA+cXV1lTtN+Rhj33PnlCvGwZ3Q43bpHWZj8HlUtfSinHdIe/xgWUGPFdh2/brA6Lh5xpyCFmkLwjhRw8hiCbbEw3eu4S86ZUSMnJ8cmdaTAmP2RCWaqypO/6KT4YXr54x2Ixg5SwReLR42Muz7cQFfv9Hin+eVpQmADxummI79f46QTVNo8mx6sF12/Ouby8oLQm3wQJbm9uMdKg0RkekGDLiucvX3C6XBJ8wDu4vrmgaipWqyXb9Y7CGO7fP2P0mUh+u2+RZAWSElm7LqWcIoj1NLp/VFhlthVDqil2hLuuWJAmGpDAKIkWWcZKYVnYgu8ubhFVRWE1ZhB0w8jsrKIZC8bLDfL/pu5NfmzJsjWv326sO523t7/RZ7xs3lMmjBgwZMSoZjVDFEKqCQxKYkCJv6BGSDVCehIDSkICJJBgwAQhIQEST3rkyy4yMzIibkTce8Nv49e705rZ7hisbXb8ZvNeZoqSokyK9JvH3Y8fM9u29lrf+
tb3eY/uIrp1mF1PlQyTqiCmmqPSEp1ncy1OllmnCIWUz9vNhuXNkoPFSb5/ZuBs8eDuPVISabeu61ivlswWU1RR4C00zeDFZOh7cWANwWPyLHxMEF1mZmRnTz0o2CeLLkpSjHQ+0foOW5TMpjNurq9lKEI52WK0QvcGb8A2DeUmEIInKIfZdZRKk9qWbrOjqAtev1oJ3U1FVtfXeagj0rbLTMESrmEMHhU0OJhNpqzagE+9TJD5BKmg8uTtRGE3Dtsb4nWHTi0TLcr0y9dXmAhm4GWqhMlNQYwiWUusLaHR+An84le/wsw0/TRBlegWCAVNtbyML5ke1wTX43cyoOJ2Ea0SRV3Q6AOCWrPrHdG5jGHmjVxrjBGep5ghCrSUcukuI5z74Q9TFFkJLDGrS242W2xdE5VGJ50DaMyoleLTX/2aH37/uyQfmTVzNtsNRaOZNRN6H0gq8uGH70FUfPWbp2K3EzuKuqEymuX1FW4TcLsdDx8/5nq5ot11KB1Gq5iUoa8/5/h2BNCUUG0vJlpmsESV12OMzBeHXF9eyM+qQTl8T5RPGZjR+Sb0PpCieK8LHU3oO+v1Soj3VkpYomG1bJkt5py/XnJ84jmeT7i43IknexaiUIh+rtWKq+sd8wMJ9K7d4bsJYTLBhUBZNnz43Y949fw5KcHNzQ2vXzznerUEY7EKFgcTysKwW/cQxGb58eOHPD97ReUM1iou1i+5e3CHpmjwzlPqxPGDe3z2+VfyMOo8B9L5W0R6ERoeHCVFR3OYO1ZY40VYpCgxRia4lNE5k8h00BhJvUPpCtUlYuogQrvtSaUmOIU6hvv1I86fP8F0QotJbSAFB86T+p7oPOTZepWnqZTWdLuO+XyO0rBar/nlr37BD37wV0IpigGjvJSNKVIVitJWLGZ1zlgMXaewSmGbMg9FNAzShTH7JAk7S2AOH8Xsrm072q4Xtf2Mm4uBnGxuy+VKxjvPXqHrSi6GFV1QCoutKuzOoaKoBhRAoTrxmW+XoBu8dxADSllSylNhUbDq4D3eO8pKvO5n0wmr3Q7XbikLQ+kTfg30irQSh4VSOyZNSX3l0X3Pu4f3+PwXP2ZRT3hy/hztDYWp5MH3Qo5VhUGXBU5Baix+qukbcAuFPzZsS0ecRaqTkkW1BRylTRSVZr1dExd3COcB3Wm6XO0VvaNb98TNThpI7C2HgZyo+LEDP0BeIWSjuAytGaWJSgl05jY8ODngarNFlxURQwoBn3JQzs+4KRUEw89+8St+9G/9iGdPvwLvafsOrTR9KyIvv/nN53zv+39J8Bt0SviuYxc8s/mU7XLLerlhVpZcnL3g+PSEsNtwenLKZrPh+voaazQnx8dw9qeHrm9NAE3es1tvmM8mjB1nJFM6ODgYA6iCHNTEHnb8faShEmNku5GZXZQ4Sg7KPINiTIpZVR0gJnabnjvHd6hrxWrVUZclbduistpPXiqQFPPplNnEMmsmrDY7uu2Oum4wjRiaHUwnzD98D2vg5z//FNdtUSSmk4lw8YzFx4ApNMcnhxA9m82amCL3HtzjmxcvefDwBJsMKURiSBzMp0BiWpcy6TH4vI9M2Dx7PAir5Dwz5EXuQ6ILMgRgWsGRB3M1hQS3oiww1oj609kbUjVHuRxcfYH2CkXJ8198BUlxMJtinQPnUJ1kVngnJXIe2xvKPJD/K5J44hBplPgWvTl/wemdexiTePPyxV56b1wbAOJGUJXZrCzeHiaUs1ADpS1FYpKss1CJRGA2KZlUkq2r/JTHrNCvjGQ9qMEmRj6nLguSUgRg6xObfodvnTBujcGYAF6JcEwdMUBTT+i2u1HoZrBIgURdN3lKLNJ3HSoE2vWaYtLQbVrqgzkmGqyu6Dc7Jk3FzEZY7lBJU4TEvG7YrTb0G8esqcW2IiaSNnl8MqFVJOmelp7UTLiJK4rJhO3U09yd8N0fPqKZR6ZqxdWbl7x8/QqjIqmZ0fuW+fyIIimaYLHRoVsFIeJbT
fBKnESTNK3SsA4z1jwOZSRQWjyjfAjCy4155DMPa3QYugC6EG600QofBVQa1P11gsP5lNKKhbT3DmU0m80GYwtQELynLGuISoTQ82ZFjMynC55/c0ZdlBhtpGLcbEgu4FtRNWsLg9Ga4s/j0X87AmhK4qB4c3XFtKnGzt2QgU6aJs9Hi1BBjGnvU377fcje3mEIrFLap+TFXTI3JowxzOdzVut1hgoU11c3mBuomonw3th3jyWrkWkg7xzGWB6cHIByzCcFJ43CTvel6LPnX9F3W7p2zenRIQlY7Vrmiwln37xiMrNMmgkH0xk6GrbbrRC/vePk5JhGFxwe3eX85evRjmK7XnJ0OMsyk3u18/0U0l4ouO9lmqfLhHOftTRTikRdjOR5uURi19F2CbqA0pEYFantwYIyAeWEuhW6DW2r0Mpyc+2YVZHQbSlz9qpCQplS/nbbM9hZDM+Vj4n1ZsPhwZwHD+4RvMcWBRB4+ewpmiBivfn+7xW38uQLSVTvm5Kk4mhHTN5MpHqM2ZkgEIMESQvCgyQSMotBaZ0FW7xMemkN0dP3jsrOeHNxiS1LyrLi+nJJ1/UEL9fZh0AoE1XR4LZbJnNP3zm2YUdtK7pO1kgMkbIoURqCc2hb4L2n3WxF5d47Uu9EoLrzlBT0mxtmtcX2a0qlKOeO7XpLU2qME4y20qVc65RIOhHC3u/exZ6irtAHDe1UQ1Gzsy3VOzU0LecXn3DHKaze8PjeHaw+5KtvltjJIUbX2D5RKkNqd5TKiOBOXaDnDdEomFQE5/JzMzBCDDEljLayiedNs921OK9Efd+WhOBlcytKzi5uaNuW6UxTV0Um2KvxOVZEjhZzvn72jHv37vGbT38t+H/+W8P4qNIa51qia5k1Bc4lGjUDNG/eXFGWDTFGppOJ6DNYiyWhQqTd7cB7fIxsBkvqP/H4VgTQwdXQZ0wu5VR++B4kWZDR5WxTyvX02zzQ/Tu+/d5JaBZigyxBeblaE7zPgcujVaKqZux2LvvNZ2wxyYJQxmANMqppZ9xcPef68oKayKpdMT1dUB3cAyree+89fvLjv6VpGnmAjaGeVtysLqnKguOjI1bLFevlhjevVxweLbCl5fzVa97/4ANsCFy/foNCUVcFYvWS0ElUmiIaNZDkUWOA0sh4Z1loVGloSiOGbjpn20rlhcpoJhdixHufsUqhpOC94KE+pxoREQIxls22QylDXdVMT45Y3XTEsCV5AeKT94SYsndV+j13RbFarTk7e8X3fvA9AK4uLoSSFTOlfxhPjYntbksfZIBggGw2bS8q8kpRlhVFIaIVLvvHy59NVFWNNUZ4njHiCZCnkAa8lCzCHDMtTZuCbet4fb5EW4FLgvdE9GjFkiL43oOpiM6BdwTv0Grv+FkWJT7zcnWGXIb/YkyEvieRiL0XXdreC3ZHxHQeYsStDanrqI0hOUdlLRfblrKYyU25JYqSSHzwg7/is88/QZeKWCh8qbHzhgt3SSh21Lan8CviNnHw8BiM5e47f8EXLz5lcxOoVaTsQPVQ64rYtWgv2SMxY5l+aBztn63B48s5ccuV3EeyzbqumE4nRAxG5/I+Kj774itSgtVyyTIl7t45RRlFiB6l4K/+8q/ou57kHfPplOdPnxFDZHEwZ1DDVwiJXxnDanlDVRrqskFpQ0JzdbNltVrx6NEDdruW+WzK8vqaqm5QCb77nb/AB89vPv0UO3ip/YnHtyKAQsK7Xnx6bgVPYMwwTGHYdS0qa1fK92AA8IbZXPV2UipTS0qwGp87sKAIeTwtRE9R1JASm3aHD6ITaVTCB9kLSx3QaAptaeYLKGaobYftxa+6qC2biyVVMyeUCkPB4fSAq9UKRWSz2zBtambFBG0Vu9WO+3fuc/HqNUnBarNjpiq+89F7hM6xWu/EHI1EXZdsNruxZA9pv5iB/WCBAncrYKWUSFpRmry754xB545pXdkR+oBqhEZQis+enBEjKFOgtJOxRa1Iq
iehOTg6ETWbPmKjxrsW74WsnpRCW0u73WGUhmz3MAyiNnXJpCk5ODjg//4//wZtDXfu3GF5c52hR4MtrJC0rcU2B0QfcNutwA4aTFHgnSemKApWqRXYYMTmZFNZb5cCV6SYMW0/lvHaCPWtdW7EKwd4JJJIykBIeB85nk04v1mSBu5DCLgUSb6lnN3n5dMnmUEim5q1hRjdaSW6A3HfZbbZbyo4EYL2NqGLQFVPcJstnoA2JYUObPqOdK8n9ZFIz9XVkvUmcHgQiUFgWgqLToGoFE9++TMmh1MOPvo+0ehMX0rcXF8yPTCYJBBE1/csd55meshqt+D0/vf5/KdfMKsqKqcxm4DeQuEUqfMkF7BaExLSjMSMttCKhCmE8xxiz2RSMjyWg/mfa8XKIwRHTIqLq5U8x1omBoP3XF5cExWZTaL42c9+zmazIabILz75lQx7GMNytUOpwa1UKr77H33EzUr6FtLdV1xcXOFTYj6f4XpH33c0TYPWCu86YmkgBS5fvUJLI/7POv68vPX/50PAdp0zoPg7mQtkQLsobvlN5yN34Kuy3JdzUjnu3ybTJow1o+EVMBLvB6hgIKjPZjPquhpThpPTUx49us/iYMr5mxegHKenh8ymFdokGf3EsTx/hUkyvvbBx38BwOnJMSkG+q7DO8/p8Qmr5Qql5O8IlanHOy8TSlFRVSXGaCG+506yyh1WhWSZQ9k+ZFsyncU4hTVgeiEOmeoeO9ZqT4IasqLbO8900mC0NLgIYbSqSF4kBW8uziEk+l1LaQvun57y6M4Jd4/m3D1acDApqE2SdDYJUyIlMhfT5opAlLQAbm6uGdIaHyNd17Narbm8uuLi8pLlzQ3OOUI2E+y7fhwgiBnOsIWlrGqKshS6Uu762qLIxG/Bn7UWr/YQ4sjUGCqZ4RqTtytioCks3jnxUxo2onydjbFMZ3POzl7m95EG1oB7DvSt4b4MMJTrxTwuSmQlOS/3ISVi70heut06ifpWAtbLFZcXlxgtojhGyUagotj9amOxVqN0wk5KklZEFQnJcb26ARBbFN2w9RUdB6y6CetNhVXHHNpTmtBgNomiVZg2oLuA8h4VAqF3qBSJmee7d9+EECMxQVlViCaDJiaF0gZTlFTNhMlsyvzgkMXhMb0bBNEDicAHH75H1RTjPRClpGK83yO0mte6SiqPh2pmR0dMyoLdbsuubVlvNqzXG5xzkghlAzudM0xjjDx3R8eQIsvrK1QKqPRvMo0JMjYV2e52Iq7xW8f9e/d5+vRpDhQSPOROSgDo3W1lpv0xBA8Qn6TjoyOurq5ktpxEYUuqShTOr26uaeqGzXbNtC6ZzaZsdjtSCuy6Fud6vvtXP8B1ay42S+bHC+6/8w6oyOlhw9Xzl2y/ec7k8XtEbWiqgtevzjg5XBCcp6gUF68vOFoccf7yHGs0zfQIlRS79Y77pyesVztms5ree+GxRnIQSPuMcVxUcg4D93OPCcqx27U5YKVxGAA1XJO3U/Ux+CLNml0vzQtpTo0XExeF1O53W8z8ANe3BCMjhCa/kS0NzYO7BDQvXl/QuUAkivAHJQeLOc+efk2MHmM1pRXl/f6W/NuQSZL291Sj8ly7wCLjA0wQwva4AtK4DLyXwBZj3G9EStxbExLIB3iAlHKGBYQsSFPKBJn8bh4MTkpgjaRAZYUrI5tdCNK8FNrOfi1qo7P9rwTZtt1hihKdIiYqfNsRY6K2Ja7tsDYxm03FbhvhrbrOo4Jn12p0I5VYcAF0Cdst04OSoik4f/4NB9/9GOs8r55/w6I6IIZI6xq0q+h7Q7OqKTuF7kuKNvHB/AOKrmB785RiG7Frh946UptZFcHjuw6rZUCF/Fz5ENj1/QjByCCFYNBaCRdbdDsdLkTOXr1BK0NSkhTdv3uHzXbLZrdDa6mKrJUM9/T0lMIWnL18jTaKlIIIlAgehaoqpqcnXL15SWXE3yqGyKZtsVXNarOlbmQk2zlHWcoai3HNe
rdhvbzGtxtiSsxmk98Tlf7h41sRQAf9x5inZmaz2e/8zNHhIWffnGV6yh7rSqTR43t8bcDQ8lel9dCi5fLyUrK7QtR9XAgsFjOWyyVaKbquQ2lD1/X0Xmw5Eogdb9Fw9uwFfdeRsDIfrHPTSwW2XSCFLU27Qtczvv/DH/Lks19z5+SU8/M3rFbXKDTTeUG7E5Hn5arHGsNiVhO9Yzat6V1HiIpmUtD1PSbjtEMGmbSUxEMmPXwdG185iGp1u7mSM828+MZrBLdKefn3gClKsIBB1l7nhhwhYo2SGeROiP5R6VHkA0AZ6NtOKGTaUBUVpMhHH33I82dPefjwPkfHR7x68YLr6xt5aJRALrkIHz6OfI5cWSgl+O+gO8l4Trf/qYa9Zf9uY9NsyJ6UCFLc/uW8yQwZ1mI+I/aOEKUsjznjJSVhdQz4vC3wIeIz/pkGvDANjb00rnOQznTTZOqbNtiUUPl3g/dCC7Kaw8NDdOxZnr/GRyUjkwpW2y3rtpWpOGvRVnM4n6GT4fDgkDNnMC7y4rOvWekNYeJptcKphnXQ4Bt6C3UdsF5hd4piW1BsDW6dMG1AbZ0Y53lPcj3Ky0iz63thsORzs8YwbZr8mlz7GARPb9tWsnwfSMGjVIG1ZU4GIg/u3cs6BYrC1ITo0FoznU5lRDtvfNZK1aiVZN8RKecPDxc01pC0Eo64VlitaWPCtaLEtd3tMDpXoMbQNA1931LZisP5jO3FOaenJ7y6vvn7QtQfPL4VATTnkyil6Lo/MNWvtOBwiTyqmQhJMKYh+9hXr7J4y1LmrhlKPRRNXZCIQKCuy6w7uaawVkp6pRHhGSGlKxQxJFCRpAxt12NQ4lIIqHwJX3/9lPOl4/BAYV8+Z/HoEVHNuHP3Ls+++pqu95RVxXrTsntzjbUFTTXBlBbX9hwfLYAOHxxlUaOMlYdvKDGHkkl2jbF03fPuhDai0v5htUWRmxZxvM7jFX8LBsmTTFrz+tUr5gcn44YzXliGrALunp7w+N4h1cGUsjihXZ2PQrsDIVkluFouScoIlcz3lJVFq8QHH7yLcx1lqZnWlsm9U7EWcYHW+2xPLQ9WiomE2X8MkkATIeSNUf5YGr+/z6Tl3wPR6+319vb/23eOhVudmM3n0hRxntaLGLZsROLyqTVMpg0SED1KCR0mxP1mg5L16UJAG43N0IEQzwWPTc5TGEu/2ZLyTP98OqE0gWo2g6sLim7L189fYXRNiH0O8uqW86Wntpp5M+Hrr54TT9/j5a8+w5xOqGpD7LekK0dI0lScFRV6E7EzT5UittVUazCto1851K4n7XbEXoJoCkGSlJCx5DwRps1gnJduwRbZ7RRNU+1pZ6iSL5++gCSWx+88fkhdyfPcdUE2ZiUiM01Tj8T7YcMbIB6VIBWGg9NjFrMJ24tzUJLhJwX9ZgtKs91tiQmWyxVKJR7cv8/19TVlWbJYHHB8MOfZl0+4e3zEq4sLzPTg98edf+D4VgRQuTCKmAyu63lz9pTTh4+J6CwKrECJOK5UoQqtRMPQ38o+h2MINn3vMmFaqDcpl21lVYumY7b2aKoKrZN49miD8z1tZ+mvN2gNMYPdQzkZyCiZsvnh1by6cmhrWG47prohtDv0ZI7NjKGirql0pJg2shlYy67t2K57yqKgCwprJvRuR+8ci4ManZWdhibMuI6Ghz5fmwSjuDJKyiojw8g56A5XOV/KsQk3dDOHjB3WW8f8MC9WdD7bPKqHRikI3onfkpKx2ugdKEsMSa5VznyFX61QKhJCwvWJL754wo9+9JfcXF9RVU2eeRbxamM1VkmZjs+TZCq3z241CBMS5OyQYfN25ngbMxvV00d2R87iR1hCD91IubdKYX3Ad1ui0ni5uOP6UgC6wHiHqYrsE6RIGmxW+9LGjiLQbSvKVFYbysLShk426RQhJpqihLaT+FwYChSbvuX0+ISoE2zXqLUnFOKTPtw72USF73pSlJjY4XWBURMmsWe59
qRmQpFKYuHod54ShTWJRV2iloGJLSm6hO567LZH7zRsHXQ7aL0wL7LzLXGfrQ8W4WN1k8jaknKF4nDdMWgdELcum839PHVlqKtRr4mmFkZFSpq6LFhMKqyR9bjpMj6shq1CcbA4YD6bs76+oNKyQTnApERTGFRRsFpvxD4kRnRSPD07o9YF5XSC0vDqm2fMDw748uUFCcXhn1fBfzsCqCjHW7quw+qEa1tkSluPu8/FxYUEnowhDc0RGKo1hZSaasy4hm+OXEmlcC7iXMtWdbmhFDCaDMAPD5PhZnlDQlOWxVhO7jmXjBnbvtgU2/qT41MIPdtNSzMrqedHHC6uSVowMqPlc3gfaGbTEe9brVfUZYk2GqMURmvW67UEwvy3h87n8LCrvKqE16jGMkrpSN+7zKncd+vJnzblLFEWusrdZ3FNNFYjvkReSOb5/AZPG6JivW1Z3aw4mR9jC0MytUzh5FI55YGGoWweymkx7xuMyXqur6+42YhTqFx7CdAxvg0v/HbOCLx1XX4bshnOd1wXtywoGN7trUblgLyCDYGD2Yyk4HK1RsZQPcOvaC0yhYUtsIXl+vp6XHN1VbMJgRADtijo2paqrOidI4TAdttLVaA0ldLQO5SOaGuolCJ2gaAjh7pi8fgU3XnYJOq79/j+9D1+8tOfSubZVLRdJw2lGDBW8+id9/jpF1/RHN/l/Y8+pH31CrcOKJewBRgXSUpRoFjMDKYLFMZR+hY6z82Lc06mR+Aim5sbJrqU7n4YNu/cSxiuW16jfdcLB1Uuzh4OGswDY0Qpy9NnL3N2Hnj38WOMkg0ZhJMbQ0AZGX/WRtP3HWVZia+UsgQlkNX8YMGdoxml6Zkfz2TTDp6kNF3bEl1kPplxcXl5aw2A0ZaQ4PL8gvL0KN+PLfP5TDb/9Ltr7I85vhUBNISA831WWHek6BkFdRV03rHZtZBL2ki6dcIjusdQiu2DzHD5cgAaS979LmqNYbvt84MbxjIxRsFl93QNhbEibzfgM7er4KoqaTdbrq5uMLMJfrVleh9S0mhj6Z2jQ8nUBxKQfAiYvL3WVSV0mgDTeUOIYm88iIfAPrsamhUjxpuDK8g5xISo40+mtwL97feQcKFRGQogN1oSk8k0O31mCZNh6x8ut1K0veN6uaF/+hUPPviIJ0+u0VrhnBvthquqkkCtzEhsn0wmtNs1MSUeP37MarWmc9einZpTQnVrgxru2xj43nqNt14bXr/9M/vv5a66Gv7NOIgxaAMM16UuLME5KduVHvFLkKBtjIz46ky1ev7yXCa5rGaz3eRSFqmWIDeohs+z501O6wblImpghKAoFPjKUhgFxuCfviBdrig+eFe6/zFmTdFMtdNKynEV0UWJNiW7zZpnX3yJrRvi1ok0n+pY1NC2HfPphGYXqZOh3CqM61AucVzPOPvqa46OpmwjOJdtcJRM88maV8Q8qDA8emVV4p3HFnZcH2q45kljbElMlt7L7737+AGKzGdOIsgcYuDk5Ig3V8sMbQzGgEKYB6k4JpOGu3eOqEzEJAcJtLFgDL3zTKc1KVZc3mwlFtyODklozYuqIfQdH3z4Pt+8eIUualCWs2/+jDlOviUB1HvH61evSUpzNJtQaRD6Sx5PVHBwfMz15RVD6ZrGrOLtYwgo40aJGjuiQSmGGdH0VlQw486qMtantCHERNu2TDKlKWbcZ8jgbh/HxyfcrDe0u451WTIrFN31Gc3hHTANm+sXBCWcwEHDNIaYmYUyUkj+RKVVVHWJNQZG/X1kFlkrQsy0pIGgPXbY1djEaJpGStac1aX9BRmvy3jN5MKhteX+vft88uvPQGfvb2IuYVVuUinAcrNLVGUHyXF6fEwistvtKIqCvu/ZrFdZyERGZ62xdJ3H2IJnz77hgw8/4Ga1JmQ3VYEjhqxEroRsareridwpz/hvyF3128db+Of+xfHf+/eT8xkaZtWkoihK+s2GEAS4kLJ/390PIVDXNa5fY03JZ
DrDuTMR9vYBomc+m9E7j/M3ZhbHAAAgAElEQVQOayyLxYLVSibedLaIUUC72TAzFhXBaiuK+SpSmZLZwzuEF28o1r3AVCWcPzmj0oYuV1kohQqBxWJCOSn58sunFLqgLjTu5oIqnaDaHYU1hOQ4tjXTwwdYbbi5vqSZ1piloltf0xQ1yQfa5Q0/e/Ip33nvAZeX5xwfHRGDH2fTBzzy9rjtHlbaZ/1iFyLqYEopPv/ya1JSxOCYzyfE6Gl3ndCekiQxB/M551c3GaZLOdPXvHj5CnRNYTUPH5xS0WHQqNzJl0zXUErvkt57bpZLhqnEocpUKZGsYVKVpG7Far1ivpgTkuHzL77aPxh/4vHHmMq9A/wr4B6yJv86pfQvlVLHwH8PvA98BfzjlNKVkhX9LxFzuS3wT1JKP/6H/o4xBSqPgrkQpCS0iW3bEhGen49e2glpwOeGGzlQleQxuU1xGYPHbczrD51rxnG6rmdACq0tZDGgx7JSa0tw/Ti9kIDjOyd88eXn+ChqTcFtaV98RT2dce/BA643r2laRa/9iMWRMaCheZVy6XqzvOGdxSOc64k+jE2DwWNGNo99JinZY6T3CRcCPkipPHy+orA5m87Y5ltQhPyPMQWff/kV6JJkCiHBp4yxJsnGlDbUzYTtZkcboe86BLyLFAS8jhQ6UjUlipIErDZbMDqzGCSLv7q+5n2QRhkDjKBISmEYnAbSmAHr0VdA7uPgaX8b1kiQJ4X0WHhokzLEkTeYlJkJg9FeYmxAbnctqu1kpp8BA2acLEq5+54yNJFSAiMUJ2OUrElb0XX+LQ+gm5ulBPlsfWHKktD1TI3N90I28qRAFyVq3rA4OmL3N7+kLRT2qMLi6XZbiklN7GW23KRMW2smmKri7Jvn2KLg0QcPOX9zwS6Ans5JzmJUYnkR2F7dYJRwSG21oHcdm8trivkcjaZdr+k3Dpsq6jJzkXVJ72TKagiWkTDeN8GE1ajs5YPAFykogm7RqqHrI4WCj7//F4Tg0ZArlESMnhitNOKwMuiSkohA4wgJgu+ZzhdE35GsVAVj9QVk/wcSnqKqaHs3NtlSlOoxGrhzciwBfGdAS+D+7NOvgYFB8Kcff0wG6oH/LKX0Y6XUHPh/lVL/G/BPgP89pfQvlFL/HPjnwH8O/PvAx/m/fwf4r/LXv+eQGwCag4MDVlev80yhENyDFJu3gg78vvRzpKqMDYVbGcn45Xc5kMPrKVsG37lzR2xzfeDi4pzCHogHe+6c7o3Z5E1lDthwenqHl68vePb8jI/ee0SnA1fPX3D8QcO8nKFLmGhyViiBzFjBIPWtclErGXHTuXy+jc+MM/DD576F/0EiBc/N9VIyQSW3N273ePEAcwwXZFQUSAplKnxUaPz+/dU+mAB07U4aaynKbPdmR1XXuHZHwqBsIayFlDhczJnPpviQeP36HBsD/dbJxElUJOfQCGxihtHcWw2boQkWVb6fObDdLtOHc9djiT54kGfBi9vrY6TGDGn421XM7WxXNkqdceU9PanvOpRW2FKurXMOaZyknOneekM1qIXJ31HIuKMdsm01olT4FJjPF7zz3vtsvnhKsLKh2JMF7mZNADbrDbqoJItNgNI8O3tBoTVHRye0fc/r8wtICd/umNQTSiudcOW9kMibitl0Rup7UttRGYsKgZ/9/Kes1xtsaTl7+Zy7Jwe8fv2Cpmqo6kqy0AyrafZiMXJW2X0hgjUlvvfioBoNPl9SY8Qoz2TbrKGS1Nqwa1sSFkXmvqZIjMMzoiGKiHlV1bjdmrIsM0F+WCdplM0j7eGbeLtSVIa6slSFBiVaszGpW4HzXxMGmsTX/UX+90op9SvgEfCPELtjgP8G+D+QAPqPgH+VZJX/P0qpQ6XUg/T3+cMnuZC6LHDeURTS3RYrjkAwGr/rcgnxu6e7byDcekuGIPM2Xqbyqv2dIJqyV0uCq6srCShKYW0h1BOt6NpWmhcKYvAspqKSpJXYuvadG
Hg5H3jy9RmHh4eclI7dm5c8fO8DfvPJ3+H67HtuLVVZoo1w6QZ1KIWi82KrjM6TQ/rtrvmYPebPLeeuKTSYquTR/TsodFarUngXZAolJlrvxqmZlDIpHFlMInQvAfnwYMb19Q1gRnzSO0dVV8JJBVZt4OzlK979znf45S8+oXcOtd6yj2VD5zZRGk2BIxiNC/Cbz54wn1TcPRUzr+BDnhYTbUnvA33fC/E59MSUA+igqJTfOfpB75Nc2e4td8eQmwPqCF8MWWskT7btNzSlGP++ywI0Q9DWuckocnWR5MXHquscyupMXRpj47gSBfLJGXNKVMZigtoHW5Uo6pKt3/HN331C7ckNrQh3j9h+8g1eKwpb0Ic4ruFhrNKTuLq65u7DR6xvril1QrU7wm5D6DpMWbGYz1jM51KFdDJ0UqDouh5VliSlODg+4r0P3uf1s69Yb3Y0zRyrDV0rAuAuB2GtGdkt8vljDlyglBjHaa25uOy4vLoEFfn4O++PuLPYqgx8CNF4/fzJUxSKo8NF1nkQZobWIjJdFiUqJcpKgnnvxCp5sKq2tiAlTxpBMcYNDAWz+YyTeY0KHdEYQij49NPPRCv3DzQq/5jjT8JAlVLvA/828DfAvVtB8SVS4oME12e3fu15fu2tAKqU+qfAPwWoVIMtS1welatsAaYERADER1nsWgkDc3w48k0YS/RbMVGzL2+HUvcPZZ7DTjqUVGYor2DEz5RS0pHPu6IuS4qykHFHLZQhmbRJlFayliJEXm23hN5THZ1QGEMxbbC2GKXO2l3HNjh8kKdGa4U1ioOD/TDB+KlHnHb4Z2LoDINgdJIhS8lpkkehsIWisjI3PNPluPC11vgQiRGenr1GjzCkQms4OTnk4uKaYZjAGFGjQoFKkT5o+l4EXkLIs8nsA0PSIl+nEMpKSD1VVeLanu1uy2RScvHmAq2E7dA0DSYkrAJtFPV0gp7l6iQHrxAinQ8EH3Cul0EDBUGZcS0Ifia8TBL0t+hvku0r+T1jIA1iy3t5xKIoRZi578f1MwTRsigwymCVQhUyKsutdbjPaBQogR2qsuDo8CBnTAm/2ZHWHSlGggJTlthC88H3vsfrH/+KVifKIHYmNmoWd+/w5vKSpDRK7RuoA5xw9/5Dzl6ccXV1jSkKjBHJPI00Fd2uZXHvLipGEX3JOPqLFy84PTxkt91xcHjA6b37hF3LxZtLCpU76Eijx2jNfDFDqUhVkq9Tka+nZPchBNmUrFh5TGYzHjz6gKfPv9zDVkoEmIE9Zxmxemmqag9X6cFVdgimci2HZKLIY8pG65w45QCghqavPNsyZTTjYFoRvMMgqmC/+eyLbC8Sck/iX3MAVUrNgP8R+GcppeVvdUKT+hM/QUrpr4G/BjioTlNQcuHromCqDehClOgzTWG5WjGfL9isbhBgaz/yp9jvZsA4KTEs6OFn9sV/2qeoEh3lLUcsdTxryTZjRBGyEIUGopSgMUsV5Z+VzG0umaUGoxzz6iE+BpZPPueD7/0Fv/zZZ/iwGzc8lT9riAljslhtgM22pSqLjOfGvOj2ZZPAmflz5wdzGPkczjPm0opbG4zK5yn4U0RFT0zCOA05UKaQuLq64vBgzuFigrIVy9UK73PnOims8hydnHIyKwnbDR++9x6ff/kEjUyGSHo4YGXy2ROWSovrYtlUXL25JOoKnyK7dcdml4V5808PnlZayUZaFjI/3xQWCkOsC6xZyNkOfF9jZH48JVzGgjUiJBOTGgUn6sWc0R8+ivuqyfQ4lwVGyMEjZvHlgV41LQrmTQW9z912mUl/a33ne2StBNtuu8uMB1HViilhtUbVBcHCx+++y7Of/hx8QivLWifa2DM5u0Y9fkj79XO5Z2Fo6sl7Fdbw4sULDuYzth58hPVuw+zgkBBkXRRlSbvbiZh4FjLZbne0XctPfvoTgnd8+N3vUSjN8uYNj+6dkIKnmc3YbrdMp1O8F4Hy3jlu1htRlMpDDymP/
Hqf4YwYeXDvhEfvvcvV1Yo7p3fw3UoU1ZIIwugBygJevT7nhz/6EVVRs1q+JgRxGtAGyCaIs9kURc9gZpOUyj5Zka7vqcoMMRBv3wSOT44x1nC6aFBK03Y97a6T7JSU3z/x4MFD+PpPCGD5+KMCqFKqQILnf5tS+p/yy6+G0lwp9QB4nV//Bnjn1q8/zq/9wSOGyLSR8a2mqaHfQgroFKWhpDTTSU1yHQRRVFdaUegiCxkI6brvHM57nOvHrHEMgDk4KPa42YgfxiGbFS5ozA9jTBGtoHeeqhBNx+HGaWtoe0cMKeM6QpPq2pauFYsGqY47iIG2gsN+xw++9z7PX7zierklhkhwPQPReijJo1ZsO8+29+P3YNixBY+z9nZGLNmk1Zpuu6GpG1IKjBSdfN4pJoKS660zXhzFSBmVUt4aQBnJKENI4uoYOw4mFcYOTZ/IR9/9EevrNbPFEedPP+PO+x9TWJFV8ylmbNXss3gFCU3XdegUqYoCTcsgNqKtZK+3SymljDgKAD54uj7si4Vbh9EGopdpH2tG0ZlSi9ZkWRgZE3bC/ZMhC9H+lGxzX6kYo0lJ44MXubtblYtW0LUtM1sxX9wVaTStCdrewgT3+OxQGUQFfRStU23ycEKpSZ0EIaUVlBUmGCgUu9Rzv5pjmxr7+B0JUjoSHJBl9bQ2aK04PDzk8vycu/fv8/XXX1OEDpfXiGxc4s11eXmNLUqKqkJp2O62vHn1GpKMqT757DPa7Za61LjOE3yP6TqC60ihpLCaFBXTZsquk+zZKC0KahkfJq8tlMIazfryAj3S7aTauby8ybKDmta1PL5/l48//hhMSQq9PM9xwEwRz3qjKLU0cuX+wJ5rLA2pftdR1gWrzS5nsbLmry4vuffOu4SoUUokAW+2V5RW0fViBWOMpqn+PELSH9OFV8B/DfwqpfRf3vrW/wL8h8C/yF//51uv/6dKqf8OaR7d/L34J7L2+r5lMZ9z8foV7989BlWgrMYWgcIaTmZzfvnzn8osrFKyw6ddbjxIkByMhI3WY5ZllaGc1pRFgdIaa+Rmj8o+3tH1nhDFA4YkE0WDon0CnHcMrgWCuyRC6CirmqurK07vihMm7EfPFKDHZodGRcPzL74hWMv9+/d5fN9ycf6K4wfv8JOf/AyFFZqU1uP75HBPuM020FJqdq1hk0WLBwgjBpfLzD6T9kWBKkax+/De4ZIeS1n5CxJM68mUlEQnVGax80CCLaRzqpR81YmPPvyQ6DZMDidEldhsW45Tz8N3H3FzecVuI5uD0+lWhiyhKOXrIViaJrC/V3k13KoM8n8D9Wz4zPF2kFX0XYstS0KQ+0nbj39nCIwxRiyihZqQh3wymXJ0MBeMMsq4os8P8ItXK7GzBoZJpLIqcV1PHyKpKHny+RekogBl0dGJB1MGYzUxf+yEdxFUJCW/p56lRFXBpCl55+F9nvzdTyHC45M73FwvmT56CIdzeS+tOTo8IHnHbr2l3W3ZucjBwSHzQnH03kOOTxZ8+aUjBoW2hrKqReREa5yXMlUXYnvcb1ou3pxnDyNyAzdx+eacsgCrhG2w2Uqmttr0mSmiOFgsKMuaqpSSuO96yLSyFFLGHaWyQssYtGSGMnp7emcuEJhWKGVltv/mknoyG3V5h5kNDegUOTk8Er40aRQYHxKkhEAyZVWCgm/Ozkhqj2uaohT6mJLR66vLS5RSHM1n9M5xuVzyzjsfkvitEuKPPP6YsPvvAv8B8HOl1E/ya/8FEjj/B6XUf4wkv/84f+9/RShMnyM0pv/oH/oDKYlwg7UGZS26qsZAVNc1Xd8RQs+77zzkm+fP9nPSSTHQlBSKeKsrmwC04B1u17PZdZAv9oAfjl0nBVVVcLBYYIzsngPHMJJQMREj44LxfU9SCde1uG6HDJgpYvR5cWRcJmc0TTOl3W5YLbcEU3D/geXLJ1/yzqP7fPrrX1IWGXIIAxwxEOSHxGbPjYwhAZqo3Fiuj9myK
VAJXEi5pAyoPo443Xi9xuaKEj+anIWDlITWCoKsRzqY0FMKK1nyV19/ycO7J9QLEVKJLkDbkvqe5XL1W1MdasQQhVqVz8sY+hAoi4ydZahhrAiy7oFKQluCAYtExI1vbTEYu59eytnOQFdSkE33FCTJ5HUW+uic48X5G1RKWKMoCktV1ZiiEDfJPCabgsxpayUVUo/ik08/550HdyCFLKUmzISQBiaBGrG54XMZowkZlzO2wGvFpgt8+puvMaqgmtZc7DbQlMTjBQnDIPO7Wa0woedwOqE4nnDn3Q/527/9MVsj+gPHD+5ycnzEm/Mr7t29h09weHRCm1kDdVOREuw2O9rdLovEiKgKKFIM1FXBwbyha1uKoqAoKkIKYsbYtvSuZ9dt2WxECT54j+sdEEf8HQRCq2oJXIPSmdaiNRG8o+976qYmxYBH6ETeddLozOPVRsu6LIqCjz54n24rgS8OTTTIm1GmmOVnf8RAlVQmIUbW6zWLao7Nk1NKaawCq+AvvvMhnRPe959z/DFd+P+L3ymaxuPf+z0/n4D/5E/9IGKI5pmUspOo2GGw9F7I9H2Ew+MjXLfCdaKKPpTlQxBxQbCurutzCRoJapAh2xPVx69K55uQcFHxZrkBo9HRc3xyRFVaVPZX0SlijaGpJpBqoUDEgFGB118/QWvN43vHeTxSfFu2bUfyge3qWhZFaZhPGq7evGI+m3KzXHGymJGSoiirrGH4dmMspZSzwojzUlbGGIhx6DbL+Vil8LG7pT6/pz/tlYH2uS3kJoTYuA33bgyuCZmi0WUpjpLOSbdZJabHRzQHJ2yWa6aLgmrecPnmnKvVVmyS8/urKLSX4CPOBbRV1MaO5eWDR4/w3S4LD0P0Xu4Jt4JlkvvY971ogoaAu6WxCZnXqEXNvBg67LkKQCkRw0AgBMk2xytAiJLh9QH6EOj6Dev1lqGBqTW4KMMcPgRKa4U7mhIvX7/BaiispplMaJpJhgcc690W73zWJbCZpualsZWhpZA1SvsgzamkFMcffcS8brh4/Yaju3fHzURpQ2UrrII7j+/z4uwlMQhzorEGvJfxRcRKWKfEarkErSms2IuI0pimaSa8ef0qY4myWI4OFxA9VxeXAl9n6lZRWNarFSlFnPOUZYExJV3bYXL5XtgCCzR1TdOIwIrS0qirmjpPblVcXFyImppz+Dc3aKsoipK6qpjP58TcKLZlSTOZ8+z5GfcfPaIsS8LOCDsli+wMVQUkeueYNBNClGdFZVm8+WzCzWpNu1nzOna8c+9Imssp4ZNnfjDH+QFX/TfY0gMU19fXVHXN0cmC5AO7y3Oq2RGRQugMtiQhJmultZIx6f2UToyBWW0I3jCtJrm7qvBKfKZJWtR+um5vdKWTeEQri7IFSWuRxjITLnYOtesoSJweHYiQQ0JGyzwklS2VSRRGHlJtpLZQRObThvlEaE4oUW9SSokIRww5+KssiAG79YqyLEfcjCF70VAYoUklyj0FRw3k4fwpYuJmKZQQ74W21PXSDBmZlUqN1hUjVke8NSqeW3EDJBBF2DalQFXV9PSShew6PvviCR9/9DEoxYP3H6OV4oSCn//i13lEMeIzEirixZJh28aO4rUvX4oQsfBdw1gd/PY8+/DzOjeAZpWouk+aZvQ2ulwuOViIWG/X9Xjn6HwWyh7EU/I5ZstXAIo8SqjVQCMT/qrrHaTIZFKz2jpcEFfTPoshhwi9zSIwyqJWO66WW/ncMYDaT4EV1mKLAmPECcH14lFVKuFGTmcN3nuaScO8nPKrLz/n4+98iPYBrDyixloqqxhUi559c8akmeNjj8Lyy5/+HJSWJCN4yrKiqhtilMzX9Y6yKIkxcX1zQ1GWuFaaPkrBYrGg262pZzNRWdK5tE8CtdjC0rUdi8UiB1eF8y4HRbFw6XtH1/e0bSuCKlnFCcDWoilhjUBp09mMXddycHSMQvHN2UtSNtw7Ojrh1DaU9YRmOpPnOHiMEb2Ms
izfih52GLE2RrLOFIkxcO/uXW5ulpASm+0GrU+yLqvQ626WS+4+fMBu50cTyz/1+HYE0JSVvjc97o5id36DuncXs/NEmyhMhWPHjgK0JfqWqGxWakpj+u59QikL2aqj63r6kMTvJ5eR4u1u0EWBMgqskZHJoiRpjbKalJXSBfcLvNp2mOAhBFRw4hOjJKTFEKnKAq3UWG4NpajSQ6tdZtO9d/ReXtMZ27oN9/k+YDPPzmRu6JCFwhBYJNrppMe/I5cwcbyQDEhZg1KWmKpbFzmXlLm8DEE0NZ3zdF0nwTaRaR35TKwEgNm0QivDTQqEaFlvOoxNPH3yJe9+9DExXKOKKRtn+MEPf8ivf/pjeqfGXd1Yi7FZoN7WBAWvLm9IUQxa5KwsfrhuckZjoyuJSTgBCAmcSyjnWW5vxvMPIbDbvcmltoyOzhpLWTajPUgIPgfsfZYK4iAgEnkR13s2qw2lSpSlYT6x9Lsdk7LkznHDi8slDkvBgK0mCC4P3OYNSGemRr7yu14su0fVKwSzd0RmZYPvOkxV8e6HH+JD4LsffczlqzNO791jKP5KIto77KThm+dvMKbk4HiOjY7dtmXjIWhF3TRoUxLQuOziUJalJAoR2u2Goizo2y0KQ0qe2ayBFPEucNNvs1SdbBa2sCLwoSRgbtsdKUastVhrmU1muODHgYKqrLBFiTaWdtcKjztG2q0nAWUhcFPXR6wtePPmCu+dEOO9rNPtZs2LF99wcHxC8C3nr69RKWfypsR5EV0eJg6HNbBrnayVlNB66MgrYvDcOTogKsudhw84e/pcYB4ihVasfQv69rPyxx/figCqcsqvUJQYbJ9IpqCPPSaBtzApGpbbDamsCK5Hx0DI3JGx3ItJus85oNZ1Ra0t3kecD/gQ6JwnkrBW/HtUUQgYUhbSMCk0WDM2X5L3aJfViXpPcgaVHMqLxYUGuk0nmOEAWiLLXnyxJasSoB5C0iO2SpDP3nV9Lu0GW4h8Xlq/lY0ZDVVR0NQi90UuY4ZFNOKAuSO8L0sGzFgyLEXCqIhVUJSapmpIaUJRiIAzKHZdj3MC6m+3az786GPu3S/45JNfkaIoKhXGsL48oyguSX7Ftb9Lv7pBVSU6elLIDAgtTaoYRNnfWs3jd99lvd7w5vVLxsZR1hQdVsWeIDakyPtjn0PmTNVWBBLiaSSbaR964qYdxz117tzGFLEZSpjUDXVdEbx4mE+akugj/XbD6dGc2bSi77ZUqUB5x/3TBS5ZnBOBGx+ksWEzN2ewEkm/tyQcMN6IMZbSZBW4EFHR8frrL7nz3gekpDg6PJT1ApgEx/M5zq04vnuHn3zyBTEazs9fU+hEoQuUNhyfnEAS/qZRUBWNdP0zFhxioCwKrlY3FEUhJXkhJfHh4QGb/4+6d/m1LNvSu37zsV57n2c8MjLy3rxVFGVjAxa2kUrQtUSbDsgWEqKB5D+AfwAadGjRAYEsuWHoWMgSAiHRwxJCooWQsGXj8n3njczIzIg4r/1YjznnoDHGXHvHpaq4eUWVspaUisgT5+yz91pzjjnGN77vG7snLZi8BsKb6xs+3N8xbDZIKcyLdsmdOM02J8Xhk/G3m6ZlHBd1GtsMgHqHVuBITWVO3rS1FSHicT7im0jftHgUsnl6eLDsUp3F8OrOn7JCIcUMd2ITCc4zzeqEBQHnAo+Pj1xfX3J5dcHl9oIvv3yrsuZ+IB+OCMI8jmaq88ehlH/y9b0IoDi1k3t585zjOHIRG/aHkeuuo91u0GFlcLnZ8CTCNI20JIoLq3xLrxMdpeRiY08zvoWuC4h45smxubzENRHXtJTYMMaWN/f3pMYRth05so669bmlSYKbZg2K2IJ3Rs0vgq9OQr62jbFyWTTrEm38OKdab0Q5jgKU4HHKI1ZMx0FxoX4cKu0KB6kU8rRwGGedhBiCNUh0kyzFrVzBeoist9gaGdXMAlQNUo1+27YjlgQEolczkmHjef/hAzF2ON+Dg1evn
iN5JjrP1998g/OR9joyFccg73Htlg/bFwz+gbw/4Mh4EnncgRNVlDjh7ZdfEEPgYtMzz7NSiZyzbuiJJaCGTh+T1FfGEPVPhUbW+TmKQJDx4II2oRwkgvrGAIveAubDEQ5HM4vWwOeKMMSG9w87bl6+4PN/6QXv//lPbcheQ4eKPbZDRy6aSSHZJL2a+Y2zWNWRVmwbp1hofbwlT4SovFgkMx6e+Oqn/5SH/cz28pIf/Oh3caHw5mc/I6Q9r370mn/2z38OPtKGgPcqvtgdJ+aiZjC1J1Ax74tNx/E4rdQ3yYpBOgeffPKC3/n8M+4+vONwODBstkjRJs807ZimHZ7A08O97S3RKaVFqUnDoNxKVR6F2hq3bLXwtNsBarbcxrAO7XNWDYklFd7pfPdkJjlqbq5NqkqLyqmcNVcdapbl2R8mQkxIVozWA+Icz1+8oODYbrcMfUM/dPT9hiKwLLN25nPi3bffsrm4YVSO2He+vicB1PFX/pV/lbyfuJt2pDQyhEhKRnWoChF0auMSI1JmkPDR0KmayIB16LJhkiK44Oj7gaZVJycX1WDg/f6JD3OBq0vKEJkGoPHkIAQcbilQGq4uOkI3IfuRIkmNiT2QEsFhWOZJc64qkZOpCWDdQkUkM7r5l3FRqIC8Brciy3pfpBKurZPsQ9Cg4hozqNWsW/Xamk14k6dVv8+alYio56c2MIySnGBOmcN0sLk/dXSHdlZ1vErSwXhL5ttvPtB4cLGFGOh6GMeFpwI/vL1iPI7E5oIUnvP6+lLVUHkhxIaSEgXHcZy1nF4m5qRldckFF7zN0YnEprXGWUbnPRXqjCGxB33e7K/aZ1Wr2V5eqVq/ttzOn8mKBZ8cp5wdfrkUe6EGZMH7lqf9kadx1s52q9zYeffEzdWVPmtBmQy+QG/jTSrMIEqfqwe+lA3Tbq81gVGuPvu93+Oz4rn78I5//qh01+4AACAASURBVE//kVoMxsj2quP94xPQ0rXKt2xbDejj9EjX9+r/ilprZNHKxju43A4c5sycl7ViGfrBHPWFnDL393eGgWNUuJ626+hazdSnaaJtGooUxmlSme10xDnHca9jN5YlUWWxqVhXvqhSKXjwUqsgDGM3N62iTyHYelcvUBWj5LSYErEo35qT7NZ7HXfd+w0OxzJN9F1LEa/qtKJ48zjtePnyBTihJPVyTQY5PH9+y1K8sVu++/W9CKBiErMS4aa74cnv2XaRdtgAYi7qOn1w07eUfMlXXz3RMBOcW+k11QJrpRF5r76d5uv58LgnbnqaJpAJ7ObMPkTyxrMMkLZCufTMIZG9FmFugXksHGbhd59dcnW1pb25ZN7tcNOMpISkRMmZZV5seqQpwM3BZzW80I6IltpWUvugemwBAjpBsCFSpac++DpMl0p/LDbWdS2DrDm0du8tW/CYWmo9WJzhzbJulHoVy04rdanK7nCeEAfe/OoNuWTGZeYojuCFl9cBx0LwDb0I7x8LaT4ie9j1A3IUwsM35OBoiqfIsgaTyisMNkolxkjbqYWfc6a0wuzmUAXJw+OTkbAtRFrWHpvItutUmJB1NEjOmeLqRFJTcTlDXM9gEbHFVc0y1DkpkBFCyUxPj7Tba9343rPdNAybzdoFTilRKLz5+u0p67dMTB/Nx85RXaumwZu+x4fC5LUSKM4hGb7++S959Ts/Yvv8M37Ybvgn//gf8yiZ3/3L/wZ/+H//M50s6xsWn3k6HkEWhMBmM1iWG1bBxTzPbPoOj3DRwP1cjJs5E2LDhw8feH5zxd39HSWbt2fR2VtLgXSc2B+OSlUrgsjeuuDKlXU+GFUp0LYqWNBGoE5yrVS0ZVkYhg0Yo8LZjCJBh9BV39UlF8ZxtEGAoj0H56AsH9EPKwzw9Hjg4uKCoVc63bzMXPYdl1eX3N/fs0wjKSU+//xzfvGLN7qWGz0wggS2lz3PXr3kFz//QB//XHfh4auv33J9fU0bAktaePowM3y2oWAEJGeok
vc6F7xoFiWSCcvJ8aburfX/0YCVSyHESDMLsWtwnTC6wNg15MGTBk/eOOa+sIRCaWzRz842qvBhOtB3W9Iy07ctZVF3JkLABU/XRKSo00udiV1sfG7KiZTSiYOq74w2NCt3sYjgbMREqAYiVAmqrJsS560zrT+nWSpUD1X9OVH+qDPzaFFYAlgxUjgFUe8cYh3tGmiDZbI52VRKm6WuvzPTdo0GUKAPnn7bIyXyME50TcvNy2e837+nTYns1A9TGwHe5K6BaIdbSgs4z/39w+qAVHLWe2YlY2wil8PAZrNReGVdPZbZ2cGjt0ox7ooLex+0/MwqoFBuceY4Jwu8aW0CCmIsD8x7VV3jXYh889UbJtrVEb/rOjabCz672BBDXAcYig8WMBR717WQWeaJeRk5Pt3jXGBoGvpNz6sf/Ygf/6N/wngQ7r9+y83rH9FeXxN9Q9sO/PKnv2A7XFLE8ctffaGyXfRz4TybzXZFss5ly9M8wbaDKirAyuUYGOdxDew6jWRZp9Wua9Sogng9iJacie40oyoXThQptPvddR3bTUfj1am+H3qO45Gm0WeP0xZbdA5sioL3mggN5nJVp06EEE/Pw9Z+bQbuG+X67h7vLPsXxmPCCUQXuNpeIiI83qmfg7cx5uJgLgsuOb788j1ZbHTQb3F9bwLom6/eIs7xw8sfcH15wXL/wHj/jng5kJ5Gmqsr9QsthRBarm6e8fDN1zRRvQQLsqZoNb0XqZmVKDfMORZElRmNkFpPiY7UepZOWDqhXHik9UijqpUyWraYHU9PmZ2Dq82G5TBqN98UJh6zVJOMZP27M6jAuci6ue0kFYx+kazst82uZhmLBVsxXNSknDWIOkGCwhf6PagBiH3XWQ2r004t49RhX5yVkGUFFJ1hV/YvK+4KEJtITaxCDKY4yXgfKVnZBo0TXD7StS2ff3rBTz+MHJ1j8/pHlF/9kuS0fEcgLZl5Oa40pXN3Kck6YsVRMc+AhEhrzbMmRlP8yGoMojQs8y81iMQHj+Skooi0qObfKhPvnQ48I3J1sV1t7BQnDKZGEpzAw/7IhqISyBh5/fo1x4SVmgpFPN1/YCF+ZPDcWCPQW4Dqh56+a7m8vDQoxeGLcH93x6vPfsibL77g+auXLMeZ+7s7un5guH3BX/+Dvw4UvvrlL3ja3ZMLXG57liVRnEec5+rq2uhabjX2ACE44WIzrEcqCJJmole2xetPP8XHSNf3bPuWJgZS1mma4zzisPlUZ5VK8JBKNrWfHbxotaWBNjAtiemh4pcGiRTFPIMZ7ehzCCaisEdnElTvnU5pDSpjzkU52ALa/Aue2Lbcdp1Vm26Fv2olOo0TsVHmxTxnUhLyPCEFDocDXd8qE6CNRtf601Mi/Zldx8ORx4cHdk+PvH57xyFN9H/wV+DqgnGe6TvVwrYhcHN9xfH+PcXkXxSTgFkZ66QGHOwhanrqALPKxkeP+AwhUAJIFEoUSlugEbq2Y2Kh8z1pGsnRcX84Mj+MxDHh9nv8MuNzIjrHRd9YkDoF8rWUx60JJCif9GG/42K7WelFzkOwcQbed2dNIM0EU84cx5FlSSxZIYFsQLzO67YUXHSDh3A2H6o2keRUVmow/TgTtW+2hkFV9GjEL0Y380Y5mW3ksvp1Zmgmhjay7RJtnvC5EC6v2fuANy5n9Rmo1ynXqW/FWxupqocEcYE5C9PuaNml2Gcsq+JIf0ibVCF4YtNwMUS8C/jgDAJiVctUbLxm7bpuMvd3j2y2GzOv9ux2R8qcCb7BhYAjUIxSMzSnHFjNTwrVtFpit35esLE188z+cDBZbSZIoW1aiC2Pu5Gnhz1Lymy3l3z55lf8TtsRt5dQCk0beDncKs3Mjsq339xxTKKGNFi27QVKptjBOHQtq/JMCuSZ6ISn/Z7l5or3794RYzSO6sx4nFlSMooQyrV1jrQk66wrljuO47rG52nRTLTUDrvgfWNTSPX+hhAQb1mlV3lwsWcNmLu8d
sNdgWVW+acUlSWXMq3Bdd1bRkVzBr2EEOi6jq6LhCaQcqJpG3wMNL7hqzdvyamoZ0MRunZgnkaND/KnpET6s7ocjoeHR7oY6ZrIY0zcvniFzAW6Da0B8thJ1cTAgqouipl8rCHAaCvnztWIw4lXb8viUdZm0AaMZVviIJM1wPrCUhayZI7Tka6x+dM+IC6ylIwnEFxcF+icCjF4dXPn1KDQpHDFFgAlqGOLxvkq/TOsFPXprP6SwWs21sZI32zXe5aL4pllhQB0cVUvy2NW2ze1eNOTOvqwzukxyoL9VT4KbICxGLTEnqcJHwPVyd3hyAWWlJVjKALtolK9tCOkwPL4xHK5xW96/G6hODkFvHqYnR0sgiDBr++l2pEFspXiqjV3VT+PV9WTD6tt3jEnyFDGkfv7RcvCrmO7GWibgLMAnszbUgyO0Gwlc3VzSymJiiPPy8zPf/Izfu/z12ZyrRzIFS+1+5RQGfK0qJ9tx4IgOqlTtBLx3YCITmUtIrz7+p3O9MFT8KqzFyESeXF1w7uvfkX74kc8u7lis7nCO1WjkZPqyp9d8+7hsHJYFas0k40Y2A4Xa+Nyv99xc3WJzxObH7zm519+w9u3X/GDzz7Tp2BOUZeXA84FUnHs9gf202n21uFhbwsjrTCBswrAo7O/ajMPsr0nZ/+ppaDOTjJ8XbIVQJUza028mgQ5vw6Q8NaQraX8eZKS0X2dk5DKzP6gwT3EgPcT4/HI0OgMKTVTicQm2KwnnceW08fk/N/0+n4EUAswpWSKd4zFUV5+ys1nr0+wX00l0e5xH1s+//QlMu2Y84ArWbuPWe2tShHGacGFaF6VmtFkRBVIksDp3HREA5Yr2rmXpA+0ILjkcBkka8nlihpPUBSbK0mVRVmEo0GQxfiGlaYRQ7DmiJaQdQO3VhbCWRPDZENZivpzFqGYXVgI3spu7bSWanNnWLC6NgmhhU3fcmUYacW/RHR0xewd45gpXkO/GnuocXRaEuKdadY12AXn8S6QloR6XAZc0zFOCe8LkiH6RMkNJR+JNPhYWKKHXJDukrx7VIzRmX4ZC9o1A3b1Kdc2rX49OK/dWm+leWU7YN9jKaxft6C6/jgXkOCZBeYx8zDudCGJ2qOF6pSUNdNum0jXtVxcBA67IxfXlzinmf0meBg67t88cnt7i2QdfKYcRz2Dqsu8TlUVm0Zpn7MY48FpQ3NZMmkphKYlthFZRn746hlSCrvdnrbTw+5yuOHdVz+nT6/45ptv6TY9z15ckydIuTCnYqqcQhs9TduRBXyB2Hged3sVJ4iw2faUsnB1c8HDu2/5vR++0GZnKQbFZEJoLXt0tE64vRqYclFlVy4mVHFKVjeIpYgadpRc+dee6KIJFtYUwp6bWANTmSUX20F18ClxHEdmG6+iuL1NU7CFW6wpijW0aiwQQavNs0kP2pTSRhcOmq7XQZR5UTw9JVIK9vqVG7z/rULX9yKA6imtp9U0TnS9KoXuHu65vbmx3G3tI6N8MNheXHB3eCQY3uiLlvNN52m7jmVuyJijex3xGxuIgdJ2TLFhnA6QIU8gEUoDIoqNikCcHXFyxLHQJGiSBrbgtKEiIejo1bUcVHNe74PyEHFkgbQIIglxiivFGNnvRx0VCJYJyqn0NmxIpZWaIXVtS9cZDlh0Or2YeGANLCIrjBBsQRXRje6946LzuL7HXW+MqmNa8EV1xjFEctYDaFoycwEXW8o0rdlUTjPeOQ5JEAp9hOIG3JxJHSxLZH4qhGaCvOjgsghSR2lwgmmpTWssA8WtmbHD0bSRoWuYl5lpXla8+JQ96yb1NvdI3bgMd1tLEvOFtQ68zmXXjfn85a0ZXEw8jXseDkfIhfcPO5ON9srdDZeQE2++ekvbbtheXNDEQf1CpSj2bZn+4Xik6zqtIIris6U4jnMiLUeOhyPH/cgwbOi6gXl/INhhezX0TMtCaCI4OI4TP/vJT3lxdct0PDDPAy7rsy+S1QawZLpeZ
ww1TYQszCnTDxucjwx9Q9NGpulIFA3+UrIevoaLt7ExE5H2I8hp03q6oCbcLsR17lQtnysUVIUizjD+ZZ5x3hsdSQ/+VVFn3gwxwHgc6ftA3w8o6ySs0Jc+32CNRGU8zKM6px3Hkdg2KslekxMT5Lgz+KwUM8sBoooHnr94Rs56WHkfrekE3P0Wseu7/8j//5dOrxAyjv3hwHZ7ybwsbLebtemhe0Zp1rXZEWJLu7kkH+4BcCFCzjRtyzQv+BCJNagVWcdBSBGi6NjUwTmeXd7wq/leA2h05AyhsdJrgXAQ/FjoF0+bC26eYJnxZGL0NGiG2UQrT0Uxr2SZsJr0qpVZKqLu88FpNhvqUDTNptSuzlU3WQsunlQgHWcj0demk8rVttsNbdPQtzoKJUs9buzzOoezr+PbtWxX/CmZK73y7OZFSfrO6eIvSyEvE04WLjctbdOpwih4vvz6AxRtJMUmsQlXvH97x7DpeXUTaOSRJTn8RUP012vJ67xbx3WMi/Jfp3m2sdOBbN6ZSKEsmac0AY6UquO+o207pXiZA39Bmx/FII0lJVpfmwqWkeS0vgeP0uPuvn1YJbU5O0KjGGIbA97BJ89uuPn8FYjn5Sef8v7hid1+4vjuAykva/T3BsFsNgPDsOHxoEF5HLXbn1LGOfUVuL68ooTC0+HIxdUW9Zsy6plXZzAdXZ24GS4Z24WbH71iOu4JWRCvEtiSEi542ladlsQqubbr6EMgS4WKHNEJj7tHgtNR2s4mvlaXfCnqxJ+yyoldNZIWZyNrHOTq4lXU39XMkdWG0dkcJG1MtrFf/XSdE3yj2biyIOzAz4627Vnm2TrkQK4+utU5LCnDxAxoutggwOVVx7wsal5iPgY5Z6ZpZhxnHOrb62JUnB6n/rYXDUtRxydxGhaapj3Bf9/x+l4EUAdsNh2740xOiWEYKCiv0q+Qn27qp6c9PkaapgXviP2WcnhYA6tgLkIrlubX3yJ4JNsYgJLoXMNQBLnfsXHgcoEUKI1ApTEtEI5CMzluQuDF0OHF4TcLZZzI07SCC2mZtNmT0jo3qBKyffDGb4O2bczpprETX0/xZbbu+1qg6p/K8dSFmCuH1Ckdw/vI024EJs0oHKuBgyYSVQNvXfVS1sysZn4Ok/+Fo/1dZY+C8PzFS4pk8EUHxqEm1z//xVtuLjY0XWCz3TCmhZQ9Llzg45bg1EO0ER0Xli1Al1IILjD0DdtNh7PDxoeg8869p2QlZueUV3ms6tXViKJyVnPKzMvMPI1ILsSi7vBFCiVNYJ6jZc76GlnXkA9xze6Hi6CZYNvy+PTE7ukAKeFyotsOjIc9zJm7tz/my6+/xQWHC5oZP7t8QZ1hLga3OK+QyOB6xrFFLrfsDwfGcVKXsJJ5eLoD0bG8Ej3jMq2m3DWLfXp84urqmvePD2TJ3O52OlLFqg1wlGWBJBynI14cz16+5Be//AV90+kYXwQfAz/87FP2+0wfWMthcVU0oIlCMVmxt0ZPNOyyrBmMrBBUttrYrQ0dG0XjA0+HI33fGzRQcW5YUloTH2/PUP+uAb9msGuBAadqyvhX2vwxjmgR+qZVuE2SmrV4aLc915cqG1a2gyrtcinMyTFOOnZkSWo+0saW2LTrdNbven0vAqjguL3akOaZuaii4cXzK7IUdscn5sORNO152h149zjiY8df/ou/Tw56Os40NGia37bW4RPdlKwTGAHRxo/kAi4h05GYJsZHaGLG5wY3Crn1EBUWCBn8mGiTcPPsEjdN+OlIXjIyz+YVqoslODFpZIPrGrTE1hN5nDIjM4fjRNOqC3pOizZhTgWtYjliiJ6o2bE3/mY96Sv9p9JmfKh4zgkTXMtj0Szi1w1jS6UQUQnKsCxVV68a7FKUjN22Hc5nfvzTX9H1HW3TshTh/jByuR3oceQS+fZ+BwJPx7vaH7Nbr0E8GL5b5bc6JbJVfqCAD5Fgxr/LvKj5td2X6D3BF0pZ1OwCj4vQtQN56
HC5rMEVdJ6QDh5bvai04ZCVW6gzlRY+PO55enji3qRlPgQurjf8zicvWKaR7e0V49MD4jKfvX4O7nS/go0PydPEtBQOx3Ed8SGo5FGNODzb7TWfftKZMMAcilwl3hsdLWsDMyed0Hk4jBTxXF1eEIL5m64zfwrORX0lB7eblj/8yS+4uuxocoIm0BRohhZcIi8nvBacBljRYJiLjsNW7wM9aLMtl2xZezGPTs6fq3Mfv6Z9lv24o+97u0eVW+wpbi1+1p8rAj6YqY5AyovNfTdJb+FsOm1Z1U3Bn2UBITBPI90wqKqv6Eby7iRYSckxL4UscDweyblwcXFpXrknee13vb4XARSUs7bdDvjkePv111xebWx2SrDTUthuet49HEjLwps3X/Lp5z9Q44PtBWncEVDTXkTVOy679WFXrmMlYUpRJv5lPzA9Hgk54KXQe2GeHM5MhUMW/JIZQuSLH/+UVuDT22e4pKM6dGyLzkhyuDUw6aeyAtwH+l75i223URJ3Kb92D6qKyFVo78xirZaip80rYB1yy0YqPciyi/qNtbNff0jqSq2vUV/dOViHeFWygMoui2j507Qt46jldNM0fPb6Nd9++zVfffMOcZ6L7ZZlmU8Y1lnHVDMLA/5td2YR3j0uH2Uaa5biatBVgULTNGw2A/horARt0ok19VSxouWp9560jDSxXcc+ONHf57zNAHfadHn5TOWvU8o87fbcP04ccuIXX85stwPHDxnvMZjAQqM7cQ990EmiocmqtS5qRF0E9bFMahY9jiN3d3dn3F0Br4bhIvpn27Q0axaYGIYOYVGLR7Hpo2KZo69NFCHEwGa7BfdA9J7GqGXbyw39zS2pqBl2MdlzdXUH0feOo0yF5IQ5W+fFOQ2oJh32LpLEOtv2VKuzPM5TTCwgIvgQuX94Wmd01Wd5osrZoWgikpPbWG2UCv2g0zebGO39N0rPAnCaXet0T8VXY9Ox2x24vr7G5eo9oL+7lIwPHSwLIoVpXthstkyLNnIVG/9zrUTSpscwdKRRaIaWb96945NPX+EDuNiAV7VAEz2ElsNhr6YePtJvLyl9z+6459u3X+Nz4dNPXpCXac3e9LLIsGIwajby4qLn2/sjJQmhdbTiKWWmbQI32y2Xlx29yyxDxPmGL754z/T0nt/9waeUNFOnFtasr3IYU8p2sgnj8ajOPVYyBaMoKQHawqSVZk5qyVQD4VmnBQtEzrLb2lyzjVdHxlbKx7kxcz3mqyOQs4WtSiAtn6P3pKJigFKEd+/v6IeOz15/xt3DvTqpF+1iv3371Wp47H3hcDyqEqVtWbm4a3bCyoaoaUht7DhfS0XNHtbvEe2ST8uMcwvvzW8TV3mqasIco6dve7pOyeARBzHw7fv3PLu9tftQVGtdkkoQsSF0XlP2bRsYbi745PYSKRqUQCyL1e504Iwqh1YbxQ4DH7S8jdHem3PIsmjmHBv6GLncbqgHasVdtUwu6yA8bTAGE2LAX/iLv2uHml/POe88S1JDkBgi11fXJEnKykCzPk/m5vktv/z2ketntzRytJLanWVbxbB0YTN09H2DQz/HklRVtCR9H/Os42PUfrs+v7KqAXEqy63E+2HYMC8LbduxpGxZ5kngogdQMugo6DMFszSE93c7uq4jp91pXXuvSiZYzYIAu1eaAb97f0/bKETmowojRDx/+OOfUgRefvKKly9fsRic43wgG+H/t7m+FwFUgGz5fWwivuk4HvdM44TrWmLTkGOHS4ueJl6bHpuuJedMF5WMvLjMi1ef8O6rtxyOR3rrIkjFcWqvVwTnC1K9OUPk+cXA3W7PcoTWR9ou0vrAlkKTJqITyDMpCWVewG/54sv3/Oj1C0LMarFnPDgwPMw4SSUn+r5jmrW7mnNhmtTsOJyNqcV5bfRY8KgZ6ceYqDWAjNAOp5P9XAnjHLigC2/d9WdBeCWUc1rQdUNHHxCvi3QzXPH69Wt+9eZXtLE5NcAxbqk5Qa0Zd
M0mqBmnvXf5+BOsH7k2z9YVbJlwLd3sw8j6ex1ZPLWzjldH9zQt7MbZDgj9fMtc+Pr9PSF4tpstXXD40OjYDeraMOqX0460ZncqIwUh2tSC6nBVIQ6pB7HxZJW9m9fDqfJEReBwODL0PXrLBcXzwWI3AWVeAEjTkfOCD7qxJav0sus7y/AKxdzyK3zTdx0fPqgZSOt1cunl1Q1LzuzGhYtitLj1Plb4Rt+ABqhsY6AzFGgC4D3bvmGaJrb9dsVMxUr5ag2YLWvVrDCsngRS4Hh4ousHpTSJjqRR3FXZN008TdClymAFhmEgp0yMjY2rCfa8FGOOjUq+9WsKg1X7x1w8++NkWfrE3f2jjVkRvvn6a4oUnj9/wc3tLSnZQep+uwj6vQigKWXe3z3qzYg93dYTg7pM5xjp+56cM7cXG7qLGzbbK4KzIFWyOa47fvXTn3ExXPD5p6/J8xFXFtKvZUHFysqc1DlGimahIXpuNy3v7/YICRZPu+3pWkcUh4SG6CO7pyPLIoj0OO/54s1bXr+8xiHkcmaJJQq/OjQT7HvFPUNQPK9rhrXc0zjpmOdEFmFJZZVyVnyslm81PJU0rwG0ntBahRcz9NWSP5wF2ZM01Ep9rDQtQgiRerhUHb6W+4kv33xBqE0oILZWehXtsmry6Fc5oZNqRuLWzGAVNawB1t7DOq7ZAogFRsmFpgn2ZMsaPHV4YF6/D/vdYlhdxXRLKhTfMKWCy4Vx2cE6drBKSDUrRYqpkgrilHA9DB1D2zJ0Lcs80bTNekCdDFtO9J2Cwh9afZxeXxw0XSRLIpRmxaUFQWLQzxOiZVSCI9czRCEIEUKr/qNNE6lz0utpWG3+fvj557x5/8/UXKdpwHlC15LEkVIhk3AhfhQoVPrpztaXgAv2PnQ4XRZRHqXo+xMp+JWSpHzOoCcgJRdC65VXqqebqZZmtheXNpYlrT4RUjgJEsRBHQaT1Uw7NpFsRs/qB2r/nLPS7mK0cTT63vOSDXtf9P0YnLWUbCN4Tt65dx8+cHd3RxHh1atP2W4vvmvY0r3w//UNzrke+F+Bzr7/H4jIf+yc+xeAvw88B/4P4N8Xkdk51wH/DfCvA++BvykiP/+TfkewcbTOecZ55unDt4SmU97i5ZZSMjcXV4Dj6uKamlY6J/i2AQp5Wfjhi08AIS1HLdXcx5bCZ/WXPTS9+aVkfFKvQ5cTS4LigZxo3ZYANIPjxz/5EnGqX49+YcpC02zZjcL1VvDOurF2stfBXS5EHh4f2Ww2SM6nTVQKOqZEsbS+0WC27VpyVtxvnGaScUKzKYtSTuS02Kx41qFouv6V7lHpPB91MtfPbduvvr+zG3OCDVgDUiVBn181w9bsSy3ynNfRC2rPZjdcWGGDTddwebHVUrIGIYMtvJHOD+PIPM4cDjMNTjvPGWMy6ETU2jAN5hzlvdqR6eevGbgnSCaJwjyaWQoXFxcE53Q6wKyd8TUchWhjsiHvJ46HkU9fvSSL59sPD8zzopzf4Gljw3YYNMvxUaXEIVDn9FC0IePF3OeXBTql+rh6x0UnwZacLCuUtelZiuhBDqcAVyDGVjMucYgkpHiWvLCUBl+OjGXDRgrjMrPtrnB54jiOXF8qHpwN4jGKxqn6qUezO8ljSyqWCac109YsGDQhMYHG2eQBbUxZteAKHhV1LONBqX5dtM/T2XQIv0IyYnCQzsBKhh8XnelUdC3lklbDFgSCq0FUcOHMIk9RMFXniTdWgr7v2p0HrXLev3vHN2/f/gkR6o+/fpMMdAL+hojsnM6H/9+cc/8z8B8B/7mI/H3n3H8N/IfAf2V/3onI7zvn/hbwnwF/80/6Bd45XlxvyUVohg1X17cUiRynicbp0CpbVhz3DyzzXjG1lCil0PedLlxzKGDMigAAIABJREFUPFptxeCEbchZCLDMrmYJ9R+KzHzy/JIvv7rD+4aUhA/3R+4+3CGuBa9DxpyIzRZS+sf9w57NcEMTNCBq6a48O
8eJH1hKIUS//r6q662yTiliGaPiezlPdK2ny+rk7ZqIuqLCctkpT7MUo9GIZqhW+s/zwpSK2b/pwhIRstSPb3irlafV4KJmw+utcyrTqwR2/drZzPNS7LiobARvcG5tHNhjcHCYEuPyRDFHcf2d60MBhMZ7um7gxfNnmhl6zdhrY0afk92/4BmPI8fxyFLE8LeaXYvOurKMVJzD5cLjw+NawgIEkwiWUui7jouLlrbt+ebbd3Rdx9WzFxyDoxVhHifevX3LtB+Zc+IwPq4ZdbFDpgbH83EzfdfR9T2SHI0FJ+2iN/bJZc1sczkrsVOmGnGXUjiMB7bbLToeWc02Uk4cx4l+O/CX/vK/zMO7e1XbESkpU9LI/Yc7Xmxu9DfVCuUERbMGlT+iis3WLwghGCUv6xq1dXaOvetZabiEq6Y5p3utLviaHeoUz4qF6piV4hJt45Ho6NqGIlHxYhvp8vT4hA+90gSzTokQwPuGWgq54IGyettKpSk4AVE+9Hpm2FXx/9/m+k2mcgqwq/fA/hPgbwD/nn397wH/CRpA/237O8A/AP4L55wT+fW3fbpyLjwdZ7qu55Pnn1jG4Oi6OqfEyu7lwDLv8WTDwsxEIxVTIFT8y618yVqW1s1/HkTXB+5VCSW5kIqqOOb9SMqFEBtEolXFOjIgOsjF44LOaI+x4/5p4sVtC/awdQ0pVrXMiz1ov9JEqh9iXYTzdJq9rbQjBfwl6whcRFRmCUqaJ6wKK4s9SKk0FEffBvrWIyVQB9EB4JxJNtWH8bhkdrudZrNSm1Lq81j15yKVgG4IZsV5zx+pldgrXis1zT1lVM4poZnQUFtn/uMagakUpuOMlNGaDvWgMyMSoyrVETDFfCebGGmaSN/1q+fmPI9ryXgcjyZ/NdGCva8k2ul3znGcJx7fT0S/R1LiYrvh4e5bjr4jdh1D13MxbNnNR16/ekWaZp2nhZrDlCIsy8LheOA4yypDPI4j4zzzsIZXzTarcsh7T9d3dG1LEzRoKI6qn7/e027oedrv2Gy3iHjafiAdRmbzoR2GDYd2T85qLeebjs1mw+Nuoe2Um7lMB7x3HKeFwdRL5z4I58mHBnrNMtXExKl0VOT/BWfUq8JlK7piCUSIhlkCQz9wPB71dSx4rdr3oriuDkNUgUEpCw7HxXbD4/0dl1eXQGeSXUfxAe+jQgQpMc/KIwYhL3UNW2Vg2fN5I1M5439cdPqTr98IA3XOBbRM/33gvwR+AtyLmEYQfgX8wP7+A+ALfXOSnHMPaJn/7tde828DfxugcwPby1ueP3uG941mR2v3XJiPe3ZPH1R3XTL4hmlaCE2wLqJOlsx43DoDRjGt2t2rbMAaBurfg5VdUnSwWMGzLEIpTvmCVZlBpeKcSlacysQILceckTDgg+hMIwGMX5elWBbtzP7OMt9auoryV89NNJw7laiVCqJ+ioG7u3ue3V6t7jeVo1cXv8jpZzEjEu9r40WVHp2VUl3b0rqezWaj6wrNCKa5MM4L43x+Op854Di7F7aBKgHec5rjdH55c1Zya1zVZ1w5kfV9CzUzrU4+ip968/dUuU3NlC0UFZjMTCOVI+OkVmYlz3rYdIGuGdYqwPQ0Z5vIDuicyUkH7c2L47h/Yvd4B7RcPHuG6xf2D09Aw5dffqtBzhQtDkfXNLRtw/biittYeZGahXnvVxlvY83GZZl1SuySGfdHDo87RfTMt3IdayLYgaH3/M23T3jveXZ7C4Y555RAhM1my/Fh1Cap77nY3rK5VJaDzhDqAMdm2yE2xqIejrq+T+OClSEgawJXRPF8b4dw09QRw+uu1j6DZdDVT1a/5oxSVJhmFZzEpsFj/GUcIlkhEecM/64OYGI4feDq2XMOxyPesxpnixmY+6Cqq77XCaht0/C4Gz+CqIqIQgLOGQzl1vv721y/UQAVbXP9VefcDfDfA3/pt/ptH7/m3wH+DsDz4RN59enrFbfzK36W+ParL7Tc8eq6rgshq
ZuKjXpQDax1EJE127HC0jKYakTh1hKzZkkOPYWO05Hstit3T7ItlpQ1WGOLzeuKcr5RfbDX7u7bu3teffIC30blo6aCeCE0gpMFLwuIOkA5w/40WPq1/IOToqrkvL7nOpIj5czFxZacsxG5ayawGoMpJICWX1WJVZs4xQLN+jukwg5Vpqf3otlGtoMuj5R0oNg4TegIWz3EdDS0kaQNa00p44jUAWIrNxUNiisu60AFkNatNpMJJ6zcPaTgpVq1FdNwn5oGpWZOa9YkNuRNhwcK6l25pMKz22vTyJf1tYMPlOLUaLropMcc9HCBjrXRBYhk5v0HPr3dcBxndvs9iySCq9zflmXRDHR/SODyKmyogck5R9tGNkPP0Le03UDXDXaf9N47qYR5DYp3j0dy0ueYUiaXRAiOrmv1Mz1/xtu3X5GXQhM9rvfs7hxNO+DywuPTPTQDm5jZtIbR+sj9u6/ZXl7RNI0FTAuiuvM4uW5pcFsy6IgaKGj1ItTuvTmLGUSgOvdoVDlAlOWQJVMNoJ33jNNE1w8EtJILrl3XcS7oAeWgFJimRMnz6u+LS3ZfDIpyau5SR610XcCHyGF3pD6E+rnwHl+g+ILYALoXtw38Fn4i36kLLyL3zrl/CPybwI1zLloW+kPgjX3bG+Bz4FdOnYSv0WbSH3vltCjdoW05HI/sdjsoaobbttHIuhrIpOhpn6tUglreWfC0008NVm0eu32fSchP/EjqklFtdfGOn/zsK9bJY7XENoynSgmr/R12ktFG8I4xCz/5+sNalqVpIqdELJnPP33OEDuoxq+lrAvs18uoCi0oZikm41cAv4m1w1lnnrP+jF7OxiScqEqV76iiC/28NeB5PJtB+YnrjXKnLFADcqDpWiVrU7vq2RyHFJsU0SCXc2F/POowtVLHmLBuxuqKf8IpbaP6E86axBoykgCvZaxR9ZSaxYmSA6tvaA1463RS0ZlKOcNX37zncjPQDz3Bq29rEkGcDTaLKmHEAjUVE3RKsXNA32nJu+kbLrbPdd3ZxnQur/df2Ra9jnfJmXGcGMdR38ucuDs8cei6tTpydu+VZ6lrthR1WmrbwOZCs2fs68VGX0g66Bq034E4jscj3jn6TnHBp8cHrm5brq6eMx+ecBRSmnn27Bnv7+64ubldV09NKry3cc12qKeUGKeFedGJCi5Wa8nZPvFo67VY4FWCe4h+1dU3UW3shGzeEAuIcHf/LV3f28as8Nvp4M2mvqoHUcVXEWi7jqenJ4ZWA2/fqu69byNN1GTreDyuzwjqwSAaxPHgCq2HH75+rXX0d7x+ky78S2Cx4DkA/xbaGPqHwL+DduL/A+B/sB/5H+3//3f79//lT8I/Qbvwd++/XsnFbdsSvc7KyblOOwzacDBzjhVjQ1YMo8J0sp5IGR9O0sdzHDAYCVqcBrP9fk/sWqr12xpa1/rkhEX5oDzJqWSd8BkDY55JoQEXiX1HBhZxiM/4kvny3T2/+9krIjrMKi0JkUIT1FE/mHZfAXlZN2d98s7uk/OeLgaWZdYNuAaLE/0oNuqak3K2YKCfPYRg7kB2gIhTjq3zFBs/iyXYlTNSZ4RnMxxZUtJZ8ks6dfqp45X17eoYBsfQt8QY1+A/TpOWY1Yeapwqa9zGsg3LKSg2ObTqujERwjov/uzv9eGLVMWWrhlBZ8k757nfHWF3PLufjmwabe8dXd/TN9FK09OB5kNV7zjG8UjfNeS02Hq191AaaxIGnTXj1Li4aTxt7Lm+HIxq5tX/oAiZQk5qrjzPyzrFU8vRyJIyyxzZ7ybu7kYbtZ2RNOODI2WFYJTwXh3glWz++PjAFs9f+6v/GqHpiKGQp4NSjcxC7ubmdsUeKw2uVnB6UGpl1DQNITZ0Znd4mCbUByeCQDbcExdIRfA+kkQUDsl6sARfm0z6uGql6aPaIqqW3ZOy4FzQ6maFiNz6rHUktTJ2+r5jGNSvYDP0OHScMyLqBhVamqblaB4VwNoor
QeXl4XPf/AZlD+9qZyvgb9nOKgH/jsR+Z+cc/8E+PvOuf8U+D+Bv2vf/3eB/9Y592PgA/C3fpM34g2zdM6xTCOTBT2sPBeZLbOsypcT6F0sQLjanFlVNtUX8KTjrRGpmPEDONq+17k/2OgBex9NjDaSts6FMXpO9LimpetalhgoDYzicV0gtBHpOg5Pe3zs8UshL4WYGn729Xt+75NrgvcE8wHFAlvNmFLKK/ZVv56zbhAH7HcPXF1fY5Y1q/3Xyu8Ec/7RQ6T+HZF1c9Z7g1NVTMonnFOQNVPXOFoVMPq7Inqv5+zJaUYwv1FvBiYiOpbZqSPSkqpp7inL9vbeQggMTaDre6rhcz3Y5mWhiUr/ylkPgyJWRaya+lPntEI0rMecWw/CoiUKzoe1+y9W7juvHEcRx+GYeDrMa2CsbUcnJyu2EAJz0UbeZjPgpBjzY9ZJr2RC65DiVxgjVnvBqHSraTzwtNvx8tktTQTXRKQPwICda7a2q9u/PoySM+M0cjzo2JFxFg7jqDhiLjw97TjsdriilKKq7Y/ROtPGkshnmF/TNNzd3a2jRk5lSE04xA4yTxs9XdMz9NEOKlu3znMcR7wP5JSY54R4e97nTcIKz9i6r88nxsg0TWoiJLKa4aw6I9HnmS3Iq39ENt8Bfcfj/aOdoer9UCQhRMZpWff8eaNM4QbhL/6Ff5E2JsaxtnO+2/WbdOH/L+Cv/RFf/ynwB3/E10fg3/2ub0Sn/QG4FXCvlwZO7XI2bSCuTvNogMUetmWgLuhAr3ra6NCsmuGd6CbFZGTTNOGcXyk+9fLeaxZcFJPS7i+I92pD10ToImNYKF0LTaC9GJgBwgaWBDP4CcroTMtrSpWsxGtyJcDX7PbUMc9r8F931Foi1Rnb69A051Q9VE5NmRqk7LmsBhLrJnHK21SaSFnJ3N4p1aWW1Fl0IJwDnGWUbVMopSGVzHGcWIq6HakBSLZDJ9rvBk19izlIGfE5F/ZFOMx7OwAwn1Ps4Cg0rTrwxBjJRVC3eD7GsTnbHNZsqSMiRKCJnnmeidW82jtzsXDIygew7SrgTDOOBU7tAdeqBsbdBIz4e5177p0zUYFjMwxsNj1N1Gfl8acGXk44D33b0Q+dmiqbEqcgxvTQ4K0QQiEaI0XQpoxvI1ebggsN++PE/cMTDnWuOh4nbm+f8f6bb8GggBjUL/Tp8UHHdJS0Yq3OJLy3NzfqdH92H8/vaxExAafTPoMLZmCs0Fogc7XtdNJB07LtW+v1VQqWKLtlXvSwFnCrzlfXQtc2HA47NoOOU0lZpwJwFoCtADm79FnrodroKJKmo6TKtbbg6077+fSjuqbvPtzRdUD+7TRF3wslkjaBTg+uyEl5IpY59X1nSq+i9mqgBr2uophn839gDca2k0yHfVIi4aBpW+7u77m8uDSPwri6ITm0K9v3HYfjcYXXQ9vgm5bcRti25D4wh0zu4MUPbrm8Hkil8HB/YHqa8ceM7DJ50cCbYySWBD7gioeYFRsUfehFhDZGteSzAJjMpKFpGtwwKP5r2Gy0cawxqqO8N/mm6nuFaPw9OOmFa9NAebRnFBa9qxTvEMtci/EAcxYr7zXzrVSiNjQrlUdETTHmuXqW+jXjVUZAXHm6ybKHxbi82fi4Yo2xklV9skwJXMFVPDQoA0JMouqozIX6e+ywQA8UjNTdmD6/GI9R0CBSr0obc17sOdjrOVQDL6fvTIZFVxf9YtBHRng8HDjME2lZtMvs3Jr5BSdsNhuGIaql4XpYiCnrCtRRzE7Xbq20bEOY874ablwMA40P3H3zgYTSnPJSEI81oxLT/oGhC3TBiPM+4opJWe3zzItKpLuupwjM81JhRlKW1UugSiWFRF0xlX9cexDRTEvUS4C1ivTOXP+NbbImSc7pkMGk3NZpnmm7FhG3BmjLi9ZncGpMGtxjuHrTqGBiyjNdN6hiELE1cOKjAlACeM/jceYmbvDlTykD/bO5qr74tMnr3Wrbx
iRssm4+fXBlDZqV+rPeJNtIpco87cQr5xvOSsDLi0vFCrFipRTarlcttIN5WVYYoTinXUF05swSoHSOpfWUQeAmMg5ZxyfHnsc04rJQQiZGp9ZqIeKaxjCXom745lFas7BlXgzv0gy5UpLGcVw77LV8rZLJOudIg6dy9rzXzGsdtQu68WtGiM7sVmqLYkwnY5Hz4HLCJQ2RWp/TSr2yAB9DIHl/GqFcZPWGJE/0wSMU7ZJ6T84n2tM5KVvNIBrefXhQg+zp5EgvLhphmrWaMHDXFE0mYAANJnpEMc8LbdeukMQ5NL9mXob/IVLPXisAZf17rEH8DFcDx9APbIbejIUz0zQxTZPNqJoR8Yxpjzzs9BmURN91OISry0tiE2mak8BA0E58zdhrpZEtmtw/PvHw+IQbZ/qLS9I0czge9BDugo4pFmE5zshZIHZ2EIo4mwyrGf7xeKQfNmpmYmtnXhQ+wjwBPoY0dD2stores1h1KHPFFGXdj/5szaxAy/q2FPuMjfI520Z9Lpw35sxZ1Yir7JXzg62uBui7nmme2e3GdepnVU1pS6AyQgopFfbjzEX/53kmEpw1UE4Px6GTOusgqjqiF6nkeF3RueR1U5y4c7JmEKDNFt2wGqRijOx2B53aZxQLsRM1hsBhnqwpUE9Mfa1cio4qbhypc8g2sDRC2cJxmxiDgJvYuyNpo0XsfBRc4/BNYBHTinunAgD7LNWNqNKMzqc5Vpxzmec1oNbSvGaJoMEDk9w5Kp/QTm8rMXM+GceGEIxK0ts9VW/IVVFTzZtXOMQYDlI3hXz0WpUVcKKInTrZNWPWzE7zKFXnVQqSZnGKwRaWOeOSdptnm1HlY0NaEtE6tTiHc8EODNb7AYbx1gBnv6Npm3WdcAYlnC535lTkzipIt67J+k/jONK27RqcvfdM88w0T6xb3tZgCJEYm9U9y1mTrZTIlHRI3d3Tob5TVtI3QhE1+agshRBsvn3SKiMXR9P37Pd7pt2BITa8vnkGruH+6UmNN2Kga531AUz/jxqMgDMqU2az2fDhwx0vX74k50zbNnRdCziWnBnHmSXpxM9i+7XOKaqNmXTmKtX13WosDjqNoX4uW0Kc6HdKT/SGuY/TaA1MFRXUfkCtptbn5WqksK/YGmualml+XDvw3gVidNzc3PDh/QeFmUT32TQlLi7+lLTwfzaX++hmOsOvqkyxfrE2hOrPyNlmx59wxPX75WwQlahhrQMQnTXTNKqxLWTVIzsNlNM8ros/5+pmXvCNyueKQ53B28BeFkrn8BvPU9rz4tULjsc9NxfPeJAHSl6gdZTGU4Ln6XjkZR91GJ07UXxzMgoI/DrQszZgrq6vAZWeqTRT/71yOpXOU9aMtQZWvR0nEnztaNaS+/w5SJVj6v8COv9JbLGdMrWy4rU1eNbKYVkWlZla5752pIodYmLvucIAtULA6eLHeZpGg+TLl8/Z74+qKCqJ4AQpSXmFJYO37Nh/HDB1k9b3W0xHrl3j8uvVSy2Zfz2rcVXFcspQ69rabDYrFFIZx2fb+IS9+ZpxsToKVdzUN4HWNxz2B309KZTiFPowz05XIQKTJJYkJFegiE5XwNEOGy76Dbu7e8a08O7ujpedmUYL+JQ5HNVmrm0aNkPP6hZfS2HrzH3yyUvevHnDq1evbPEpnNYGT7PpNPyZ+q1yelPKSmtLqrVPqdA0nsbX8rqskIgGwVo1OnNJso6/t+TJgysOb94Q4uqhZy2peuDK6R5REyUtLam82Uo5zHnh8uqCUmZevnzO+w93eB81m4f1z+96fU8CqB5fdXNpxzLg8sm0FrFFF4PRNU6Ss3Uj+BMOWssExUAs/RdVDoUQVLlUZOWdSfH4qCe7dui9lfEOZ3QNfW4228UJUxrJzkGE0EX8JvD81TUf3i3M+5GLq5b3H1QmGZ2AKzztZm66azWZyAWPyU4NoqhSz/PSWDy2ILRMbxp1qlK+oQUkr7roinGeYz7nvqF1nVRbs
7ZpVipU7XqyMhTWRwMYjcrMc513a7mMKIXKhaDjng0GCCGcdPR66/R7zTR3RQmsJENsDDBK2G5bPTy9dEhRvqP3nqZtVKiA0l6Ox5GCuqfrNAJ9reLimqEUCmVeiF1X5/GpDt4+5HowuJP4Qq9iDUjWPVssCFT4o2Ku1dnIPpJ9tlN1hWCSYw3WxTK1zXajGbJvVrmuFSRg0wnc2WuVpeCLEOwfnu4faLuWMnTsdjse5omnbwoX19c4o1QUY21MqTA+7vXgc46rywvaxkNJgNLUXrx8ydNuz3a7wRGUCmiNtfV9IEZDg6YLiM3fKlbee1T+2V5drnPm9b7Uw0GVR+O8ME2JJWnX/ATlsZb9x2lWmKk48GfwUa2OVjhBPooL66FaIDYtfTso6JAzt7dXvPv2PT60LKWs03G/6/W9CKAOxWIqBhhcYJlnmrY9nVg1qyhy6lTbAqoUmJRVsbQqIOzVS8V9UM/HcRzNvk1L5Mp7y+YBiQUJOAOfnX5vMKxPQkMKgexARJtcodVyaDP0tGhwvPcP5rqkb3fJhXFaGEQD25LrjJu6OA3frWwBf8rEKyUpZ+0c1wZRjN6ycL+WMzEqv7TyYOuYXVcXmeZthtedtM0fY1K6Y5w7daFVyeJWQwkl0pdV1qjvG5oQTQnl1gADVmJVxkH1Sz3LbGu2f8Jg7fOEuDYdqmkvDrqutfnu2Gsa3QVDeERWXHjOmfvHJ5pWMV+lzSm+rpxRQcTj1sBaOYM2SNl+iTf1Vtc22tRxYpv5dHivG/rXstePS/vTOh7HI5vt9mTSfIaxIsYVyJoQZArTeKQsiW5QGa73HhcDbdfSxlabUl6D2MlX9ew10QTi7v6BGBzbTU/TBILpz68vL5iXWffarw1PkHxaJ3ZHVolucEF5vqJWd04cbezIZcGdr23j/w5dT9ditneLjSS3hqLBPn0XVX0YlJpVn6lzzn5vxYlqkiBnn7PKSxMpL7TVfzTD9fU19w87nZo7T/w21/cigIptIu1uNux2O/pe55skTt6DYIvxvFLHWdZQqSdwjs8puF2zVJ3gGE1JscIBTkukXLLRnPSVV6Da5pI7BMmZZRyR4GjpFeRfIB8SRMebP/yCrom45CiPcNVs2eWnteTKIrz58mt+9PxaMwgUVxXjtioZ3p26407Lmw8f7nj2/FazUCuHHXqYLHNaMUoQmqZVjpwtQKGe5rV1ZBhYzkQLktUFqgaf2kWtB4lKaTXzr82AdXCfqBlK06pSK/0/1L1JrG5Zlh70rd2c5v9vH/EyMiKyikynXYWRPaLEhAliaCyYAEYgBBKSp0iWZfCMCQNGYAkJZMkDGIFgAgyRwDMmGIkBqkJUVmUTGS/iRbzmNn9zzm4Wg7XW3ueGk8qIMFiPk4p87937N6fZezXf+ta3OIvxVkzbjK+ch8rScYdfRLhECl89RNbBXyA8Pj1hP40S0TYLj74RSarTjasJTdM1w4iBUL3HyA4f3vxI1MidiIiUwq0JQD5WuLPnNWFZk0a6iltbswMIIOkIO58XmXukqEevoOuWJhsBrB01JA0EhN5xw5Ae9uPxiHEYpY+/mtBGh6rgNJr1Af5CC1kEVEeAI6FfOYclr/AhwLFr973ds2bMRZnLO4+1VqTHo3TxjAHjOKKC4eOEdV2U/rVxAqr10AtxmrtYtG37EIychYdp2rXgTQFJgyJPQuEa9hE8h+fXTL6tk4eHB1xcXLf3Gz96TRlcGalkhXMijieD4TzABR/c3mAYIpbzCd6LONDgAmrJGOedTDH4Hsd7YUABtI37dDg0ebZl0VYxTZ1td7XGrE2qK6/T3zO3KrZtDMPp1rTlu20KIfpgh2HAWWkcwiPUdKMWuCrCvS4XuJSQHo+g6hADYX2z4AIz5jhiVyOoENaUUJLHeQH8ucKnAqep568++xI/+dEPW1pKiumJYZSN14ocjnB5eYXzeUUIXvvNe388Ayipj+xd6iIGTgWjubL2fdfmwZ1zoidQp
EhhJGVrRNg6F+mM6lMOSSx4y1PJuSbia0bPsNagkagNBDPcrJG6VWXduW7AwdzG60oBEQgxCh4H6Zdv3w3GMIQWZdpnereplOuI5Egy5+dwOGLezWJpNRLtBPuMaXDS4cMyQjclxnJecF5XXX/ScOF9wP7yAufzAsfdNZFmMQrQNXaD3XdSB+jItd/nnAU3Zpl44JxrvNViHFq7pyRsjqpWuqrDlZElDDiHNSWMcdDoS5kQWpDZwn3FONVw4EK4P2a4c2n7YgwRy3qwzabX0GGKGAdMU0AIoe3bWqVbqJYCJssevc6V0ixP13UrInHH+rmgBS/ENt0WuLrYoZQTQFIQ9A4YYsB+F9s+BTnkGvH4dNBnJLzfw8M77H/wIWiIqFWyt2EYQQSk5YxXLz//tqbq2fHeGFBA0s69itrKw0Lb1DYQq6dF0m1hM2nM6zOjEc9D8DqzxbpFbNofGo4ih0Zh5BBjwHlZe787Q0mBEkGSL+BSwOsCOjt4H1GCB5eCYfZYvzhhmGUUQXlYsL45ivFcCmgtoFoUT/L41a9e4pOPbtuCiz7qNVQEU7pRQ1m4IMYgp8rSKCCQoiwGdh5G34FGOLIIdSODEL1vszlZi03VWmVb11OW6FNxC9EAcK16Lwr2BazNCjYcL/iAZe0Oj0i0CyrJ98j4j25YAWUYEAE6osKwLzTnJpzE29tblHVBm6ejLINiGy7LJMw4dCpKKaXNyuFaZE2QRuiDBzlJhp0LmyYLmZ8DFvELT0KFi6PHGBwuMev9lZio1IrjaUF1BeQiOv5en61Vky7EBtc2gyo6m1UbNorwR81wKv3LkYjV1FqB0DWx1KN2AAAgAElEQVRYTVbAGeOgZV9oYh4yZqZoFd8CDlPmMuOmawIQilJpqCuWnFQ8h1RBXjik5iRLKjivBSIb3A+uOraECPM0IvqMq6tL1CwMA+kO62I3LSayjjhlCPAmZxIn6/Dw8ID9hW94slMHYipWwXvc39/D+xFWCP5Lf/n3kHSKQ1ozDk8LjsejQigqcvI9jvfCgBIR5nnC6XRuC86Ufhp9hHgzF0XuthlPwBaV/tzEQnRB1VIxjhNOp9MzjI8VRzPMxRSOzNsaIM2awqFWEbldVxCNcOeCAJ0KOBLW8ojpekYsEWN0ePWzd8hPC8K5gs4JtIryPSmGlxn44tXX+OFHL1okuaYMB2lXs41W8iqzs9WISbdQj55z6a2bolgEGL1pW2gDNhJjThaSRX9V3L7gmcoptqFdpcEmhJpFE9VXwsqlLfSFOhHZaFQpSwEwVVFc99rLbCwBgV/kXJc1qcMQKIWhnTw+YFkz1qWg5Kw9/hLlCqwg00KncRLZwBiQCuB9lAioVdohjg9o3EdmwHtuhswRt0jZ6FRWEHGhF4GKqnQJPSlgHALmae6pKQFGebdnlJJoINjaM+eYS0HOGeuyCiQzRMjYDoEOhI5s4h6GC1IT3946JCk2oq2NQpBxv+PYxhN31oE8TzMwznkVT+4/l2hwQ0saR6H81dIcgqzb59fVsjYWLDQdzwAIXz8cNTiIyFnU/ZkrYvCYhkFgBy986eC9TFdg6XBjMLgmOAq4utorvStK333JcD7g8y++wquv3sL0bAWzqviLv/8jlCKt4KgVIRCubvZwT17hJYcQHXD6LlZLjvfCgDIzjoejtOvpA7PF0cY2kHtmMA072uI6rGCfqMxvaDtgvH37BsMwwfluDMwuWGFFWjf1nICmNq4JhUQQtYpYRErAkTGHAD4W1ORQ1xVPjyuW8IT1nOHWCp8YOCXQmoCUMeYCVwHHMqpiLRWfv/wSH338EQKAqP3PTa6udjUap/82gRDDIA2ThN0nKBFdWwhd8+p6b7Sj5/7+ATfXV1oxD/pa39J/odUIKyGOUWT9jAWhkW6bAqp3qqhBIHJtyJ6l4uuyIA6jppFGqQKsJVW4tg4pS2SbSkIpJ60gy7MlEGIcxbEohJEr4bhmlNMiBRdtgqhKQidiTOOE4
Bghyux5GQNPzbAUbTcMqm7kfac8VW12SLkgl4I1lRYFGmzw6ut3CDHq57sm0swsnymTMqtOiaTmLAiEIQyYR5UgZEJOK+ZplPZlkme+nBdJhqqMHo5D2GRb1uBgBVcgKxTkglxbW9NAW/c9s3PPpAHtubTlTxLdH45HzLP06xs2D8UnNcZpODnZ/rJgRyls0MzEe+mc4yoEgCWv6OpOaOuZIRjqMETMuxm7GODDAKAiFeAXP/tTPJ4SSNeDd1E+kAEuQvofTYpSXJqK0wf8/Oe/gvcR3jsMw/9HivT/OA57UKUa745bGmjcT/OeVhmWN9r7O64mi7YbVHJd2b4UM67QWykf0np2rWCgqbstSlsIZPjRKsZ1N0TsACwPR7gQwfqgCqT9z60JKBWcxHgiVzzlhFgZ+8rYF+AaERmEL375OT75nU8ktXZeixKySOMgROc+j0YvvqJFI/Zn0XStVhtpIJiVRduGf5TKWNZVpO+YWvcSAOkiIvQIzDmkNcGmMDbDDjG0FtVL9VzERdaUGyyQakVeVpTCSGVFc1u2aRVbTakgOOkK88FjiAFrykhZjELRYUilko717dQVwRO18KCsBIJDLgWeHI5LUqeaxMCkVecTSTFpGGSNTFMRHFyxchHtFSk3KT4FOBdAoqoC5wR/9UMQih0YzJIFGA+1pAJelZ7Ea+fplo412jomNb7H5QnTNGCMBO8IFxf7/poN9k9qyGzeEVehV1XlUadc8PTwDpfXNy3QiDG2Rgwhkq+td92OUjacX/2u3W6Hw+GAYRgbu0POGeDSdRlAFnLo+9Ehi5Y1pYyaV8FNtZVXHFpRfNgKSZJ1npeEx6eEUmTKaqkFy5rwycef4v5Xn6HNkuIqcAeLcHYMA84JoJpbqyeD8dmvPxe5RmLkvOAP/uAP8D/+z//Dn2GlfvPxXhhQOeSGWdr4jPqxMZxmJPvYWG6Gljabcrsw3759J+mHIyEqaz2KLXqlPrZ2Wc4t5ZMXcDNarGk8EQE5Yecd+HyCO6+ovoJWr95XqC5UiqRApQA5C1u3ysZJhTH4AKoVVIHIhMf7R8y7EWbBmWW8a14kBdpqoJpkW82Ko2laaRGE4WJkXSwarVWlgUm0LUIVJhRhpHcB7TWVJX6GD3FlYT0YLmzPrsWgTueAB21kgKblVcVwt7CLdrOQQAOOCFkhnJRWBB9EBDmKAZ/GuKF+yYAxsKgcoaxSXNGILKuwCVQ8WaJXnYDqHaIKGUvfPaOsCd45pEMGjqv2pRsJXj7T+aE5Ia6bcdTMqtAu026cOrPO240KS5UNK4IhBT3F+hSbrKawzoyUGct6El9pNDQSnNK8lqgbaZunzpv3PiASNQd2d3unlf3a8PE1rSg5oZQErkk5k1JklAyEWyRZqwxczDrmpIt0V7RxGV4yJ6e0OwZpqKtxL7P29et7uWKIM25vbiQ6dwJzOC3h12Kz5Lu2Q84ZT4eTKGKlBSgJr796iYtdwOXlFUqpuL9/hxgipt2E3eU1CA4vv/iyieE4kn1yPJ1VrKfgL/3lf0o0iL/H8V4YUAJaamqirFbAADbGdIN/SrWYNc1i5arJApHWTNnS4zgK3mWLXj8HLJGHGSJiGQciQ9gcYDO+2VIehvAAHbgUzDGC1xVrZdTzAgoyjVJo4Hq+Np7QuoZKRagCWhMYS6nYfTDj049/iKUy/vCP/i/c3PwYta5wTsbofvnlS3xweyfRTFNbsk4XaoLFXVVc0yjf71Fr39T8LaWEaZ4xTbPIxFlHjbP0Skb7Zk1PTe6uFsVkFfyv6B1NrBQaZuOgSrGASe6piVKbsbAUUb5PIrLMds/751sEfD4viJdS6Z8G1xodrM3VuKnBhwYPeCLkFNTRZLBzIk8IgFGlNXaeEIMQuEspSEnV4U1T1DskvW4CGgsCADxpSynLqIx1PSu+2qcgMHMT5+ikeyP+8Oa7la6nPEUw8ObtW9zd3qpD9LCcibyk70OM2mSiUEspyFn+k8aCj
oE7RxjiICpHrBGi7qUQIoIX+lDwNuSQ5F4qAb91vpEIoD8+PmE/TZimCcHL93hnjA0R2k45oWaRnVsLsK6rskwFIS4V+PrNW412PUDa4cASxTvnsJsGaSnV/3bTIFgBz+AXV2IvIM88rQkf3X2smadDYRkyuZsd8sooJWN3scc0Tbi6mJBTRq4Vr1+9bBjzdz3eCwNq6bQNgmNY+I624AwvYn3DtlDUQHTdmIX7ELf7+3sMcWj4EDSxYDU6QnVQlR88bxVtGIH9RKwgYggYY8R6XkQwVqMGAjfvTNXOi7tykG4oz3KtVx/c4Rgi/o9fvMKynAE/4qvX73B3MzenMk87jS7cxuMbmiXOg0rtAJem1bVYlbXjdAbumyZAjLEVViRCFWI9GFpdpxaxcmWdM6RFJ43QzDhXBhwLdmniyxuEWQwYQYs6m+4kkkq3HdtiiX1/VQqOqaQzq5pTZTgj5UcvM6wsQ6mi/5n12Ri/1pFIFHKtSBDN13EcsJ4XkZmburhJKQU5ZTjfU1ppYkAfLQHhZTpHbUoqaUZj+LNQ0gyOEqMsAh4K+eSi1yfFMyKh3V1fX7fvkHvVIz8AOJ1PyhyoXRthM4doirEpRxEBr9+8wd3tnUAXJM7e1lbHvllFzCuO57MOyysaEYto8zCMMmEzZ5zv7zFOA0LwiDE25XnAIQwBiBVxrNgBWqzkpupUIfOn0ppwPJ4F5dLBc55kDtnhdMLT8aDBQOlFLGbsdrOIKo8iID7tZxyenrCbBY5xTpgnogBl8YMW0OII7KYW+YNcn6nxHY73woACALMYOO+8Toy0SmAv5hhZ2dJ8AJuqH9pGMw97f/8gQrHYGBGRJ2/GzTkvKSeJ5qFUtH3PPvRw5OAdcH11CXDFklbs9hfwOWN5PKCmJFJ4oG53LWjlrRq3pOx+3uGXr7/GNE6YpgEyT6ji6XTGPAXsdhHOe+ynESXZxE6jn9joEmo907apDb8Vioek8SEOzaBVltEkzkul8pyPsGp4ZRYtRbbCk6SGq0nrmZITc8c7LQPg3u2UDYuu3AyYU1zWbYbFGdfPWma9Vr8bc0AhFibg8vISyyJFItawu1SJtJxzyKnAB6t8GD4InDU1r4qREXSMM4CL/QUQvLZPsnJmSyN9ewL8KMMA1yzFIxABql4kTAk1qHrNx8MRu/2uQUaSlvrG//SesK4JzBWl9CaPUqu2ylrEJ4voeHzCRRM7/gZeqtfI5JBsPpVCW45IWyiDRMCOcHV5jcfHJ8zWx09GkyuwZgdoAdLagb2S96dxwjDuWiutI9cKnSlLC+3xeJbrqkqFYtHRvbrcY4gBYGGYEOsQR08gV5UmNj2bNGBQEpQetiQZ33xelqYLcDyecT4lvIVS7NSZfP7la+zmCVGnC0z6d/lsg3O0EcXp7K5vBEvf9ngvDChrxGMevg/j6uRrMnCee1UZzG3DmXJLI88T4XojFFtZUhZbHEbktjQrq54hW4irhxnS4Ai3N1cw3c5pt8OaVqxrEgPAIoVLvFVahF6H8d1IBHsZOJUE8oSffnSBy8sLwBGOa8FnX7zG23cPmKYZwxDAWvHOWUni+qf1AfMmKrf7wJAIuPUdcdUCVHc0KAXLsj7j0bKWXkA9wmqjTxw1nNB6zUtOLfIRzqJvqWitUjldFx09ApscYNgi1PjW9tyq4YCmLuUcyImaULZNZZh1FaWsurkHVm1Gu84MrwpOhn01BX4i1JQRnXEa0eCdWsQIhCCdT1kNEUFUiVjXX/BeR1D0KZXXNzcSVZciMoXKRAB0JhXJKOOc9fnVCh+CytZ5rEnSfWttvri8xLpqsQXdcG4XqDmo9hwbpEVtkkEVDTzM8w5FHYBh0k47pKAZnAm6EFgYEAycliwG0DngeGpOaF0WTKPons7ziOBHnQnmtTOpaADRc0hjgpRsBVwHFyQDFLrXpu5RF3gQ5kiY44x6OaDxF6ytk2R67rIsKt/oZTxKZaTCeDrJvCjby7VW7CaHe
ZII1pGXgtL3ON4PAwoZr2DRlXEYDYzn5jGU+1eNx7Y5Gl4pHuV0PGKed/L53NOHbWOvYURt9pDraXfnzAG3t9cYPcTyOY+UK7LOr8lJFwhDWucaDKEQBHrVFGpcGcAxnRCYkSnilAp2g8N+cPjpjz7C//5Hv0DhijdvH3C5vxAlc6JGgm8qRtU4ofKpjR9JpCLMPUXbVkCXNSHlo1Se1alkjThszLIVpEo1w+U0EkgNW5VIOBsoglxzSyGtJdKHoJXVuonIN8+smvgzNs5GnrEZYwY3KcNG/iaLuEkhB950anXOozdBaSYUVAiSaDhsxeHhCRf7PZYlA2uWNlxnkwjQlOxFDpAxeMI0zmogC1Zls2fqLJBlkXHKtsGdowYBdMdtpHsl6w9R4ZFuGFgzKeMAW2W79fobTkuqsq/RPOk12jruQ/Ykc7E0GiXDBJtrtY4439aJPCNxRt05i/AOQ3r540A4qc5qOiY4yjJDS+sOlmEQV5XVE83YGANCIJWfk3tRM2EYZ6zr2ouk5J5BeB7ahs19LprHiuAJwxyA2URnLnA8rZh3O5ALqHUBuYB3DwccjicsJWJ5SqCn9MwmfNfjvTCghmnSZqPLg4eFUpqWAijcHrql9mZMm/dlYLfbtzSlRaqgVpiwQXW2wCvUa3MGUdS2uIJxnvDDH36Ip8MTahJQnEuRKCUXFbaQdYYq8mLEjErASrr+WIoW3hIHYsQq6venpwMC7VGDUENCkHM4nFZMwwTHFcXJ1E+JzgVrJK6tCpxz1sjQMGNN69UBtCjTEcCEEDyOxxPGYUauSQshiiGTGvoY26IV1aAOQ5hhM6MKsESDtSKQR8kKZ/huCMygGd/TjIm16PYIyjWOpShlMZiVwgWWIXw5Sasmy/llVTMvtaCk2hSgbLY4a9pfywbq0Wg0eIUgTF1Kf26LijeQBFi0O8WVyedPUxQVLR/UyFSsHqik87SYQUzwJEpiAhmTcp6L9Jx73yI/p1VwyQik7ZdZxLR3s0gPEhSDrqrU5DW6/wa+bMVG2Rdd6ISIcDgcelGLzGjLZfcIl9p7rPgn60gzEBY5wXma8O7dO1xcXIjgOBslzoqdHuQDcmXkcwFRReVlw7NGCzd284QQCNF5xBDgsmVIejrqbP2mX94cjTl/eA8mwu5ih/NpwTii4eO31xe4vb4As2Q6S0o4n1eczufvarUAfAcDSjJU7n8F8Gtm/qtE9BPIRM4PAPwDAP8mM69ENAL4LwH805Bxxn+NmX/+2z7fKEKGf2xTbItwWHMsYdBolMedxwnu+E1RMjfX0hZG0Y0pYLK+n6CtlVJo+HM//jG8NwMN5JJxOjzg5voaTw9POKQDllU4jUVb3oxWJdgrbS+qRbZmZIWiqt6AGGtOOJ0XDINHpADvPD756COcqgxxg3cITkQrABX9cA6OPGrOTfEos0Ac+rAUl+sdJwBEwJlIIghyWFLq94w3NChARTT6s/FqoBo3UD/TUlT7b1lX5FIwhlEH8vVuF4uGLSLvAwBNb7RsNq/cW9GtFJyulAIfRRjYMG2wNBhIcUiuI2h3yvm8YJ6ntj68jZvYYHwm8BF1JErKBWvKKiSMVtyxGVbS0UJaiPEaPRv2KO2qu3nCV2++xicff9xS5FILuMq6zlk6s1ImlTCUaw9O+tpZMwxjpRAR7u7uuuMpOtk1C3ZbrWBJBpFocEHUIjnLDCRKlgr+4XTCbp4lXd9C91vDZs/Jota2rnojQK0V+/0ea1oxDqNmCKTTVHtnlCQOz6mGLVVn2SiHY4YVPw0zt3sgW3wVHqrqLzjnELxOItBikFN2DpgxjQMAxrpK0Y51jREqvAN2Y8RuimDeA9+jHf67RKD/LoA/BHCl//6PAPzHzPxfEdF/DuDfAfCf6Z9vmfnPE9G/pq/7a7/tww3HMjEIa9Mj3dCyLnrK1hTSqQvjkiMsqtou3k+r7t2uyHdVE/btmNk0DAgxaj+18je1Er2mhNPphPO64LysM
m1yrS09lO/WURR6Pc+TAjHuHko6VyxQWhvRRgV75+A9Y4geL1+9w8XFHkcUXF5M8CFgiDrqtjCWdGopKjkVfPb9cXIpHSOq5ljEY1cWbNB4iJLOFaDhzSQVbBvd6zyspm5FIKlyd+ERY0VYsWhZFtUi6IRsMwDPnhltce0+zsMiHpHNk+JG0kmdbhMx5SIiHFL534hfk8MwyliIXAoCBRizAU5YF0Xv0ZvXr/HixQuEIJXtGEJP84voGqxFClgxDm2N2r3yXpW6bD0y48UHP8DhcMY4RFQIhlqQgVowjyJ+MY7yWZYhVGack7SsAgB5gRPWdcWynHE8HDDN0o/fR7MQxjF2havaU1siYStURruvZsBAwM31FdZVOn1yw9YBt4HJDDpxrjvBZxisLm/nHHbzThyCE5EURjei9hmyD7WAaMwQNsaNPD8RPel7H8b6AEAUsaQKEOO4ZA1S5FzbOBhdUz54BO8wjoMUkVyQNaJQXuvUQ4dUvuvxrQwoEf0IwL8A4D8E8DdI7uA/D+Bf15f8FwD+A4gB/Zf07wDw3wL4T4mI+Bmw8vxoWCELvmMiIcK91HPAphNJF2trr3RSbZaH1PvlWT+9zQ5Hr5aW9kAI63nBxTRh8F42adTotErxpRQGLxlEAfePBxlGRk6jMIkkCTIf2xEJWR6byyUpMHly0DozHERVJw4jQDKKuISKUIGLyx1ua8DV7Qf48ovPcHX3EV598RK1rO26BxuSppvP5OgY0HshbALZjAZhSFRXjR/IIj9SKlT1XNTkHTmhJJWqXFbpT/feY11TS3E5904ai2LBUsyLYUBpkxUNZ2NYldcMO6MbVIsIbaXYzxxJFXu/36skoBU+rPKKtiaGYUROMv2Rs2i3Gr3IeK1ErkXYjgiXl5c4HI4YhgExRKzrinFyrWgn3GACOeG3kt7znNfWflxLbqOqmRlJOaHgDALjXFc4JxHtWQtrXkU7uyISYYoO1YeWDTjnMQ3CwLi+vMDxeGwwRykyFHDJa+fjohtPic68irnobqBO0wJXDNYO7AhduMOBq3RO1awMCxiVCwq7bA/5vJQTgg8t82gFL408rVmFsY1ybX1wK1DWmtszhRaxrHpuJVpJ5MSRzrsJxgPlmpFNuaowlpRwOCcYxbA7aDQHEUPUSPW7H982Av1PAPwtAJf67w8AvGM22Ql8BuBT/funAH4FuUmZiO719V//WV/QWjjVI2ty3KOXDbZj1UxTY9cPwOFJKBoWuViEYJga2vt7YYdZepXXZcHrr7/C7/65n4C5YDkfhdKjleXPv/i1RGGKU0H705/RSZws+FLTMwFemMcF2kiCPq0IjTZjRr9yxdev3+B3fvJjDNOP8ctf/Aq3VxegfAa4osLpSBDIZ1Tt2eaqdCgpwIGr0EfQrzWrhw5RJ386iWA5MZJJtQNaJEJTErefkfOaEYgoBOlnmyBFrRVpzXBOvpeg9wk9fewEb4JNWDWj2riRGy0A7zyS6ova+Uprq067bOIkFeu6KCcXzUiLsbQol5rIMEMckAseIeqQOx2hTBC6kdeuqKjX2lLyUhq30zkZgGfTZL1z2O1HvH79NT748A5pXTVCGprwS/Beo8PUvk+uN2l/OzcISOQbGCWvIrShgiPOCd+xci/Uca1t0qkEClJcRC2Np2mwzppWDMOAV69e4cWLFzD2hLeR3br3vOvTVWUagYz6zilpk4rppEpDxTAMSIpTbw36Jv1SMWldVwp/VMWHnZMZZRWQ7jJY4dQBdQMLQCLJ0+kJVuMAMQbvEOKAcRgbM+S0JGGdQLDwpuiVGCmtUkT8HsdvNaBE9FcBvGLmf0BE/9z3+pbf/Ll/HcBfB4DZ72Dtdlu9Su8NqwIAbpXCVqFrxGp5Nrv9hURKWlWGeTGgeT4L921z2tA2hsMHH3yIh7dvcHF5iegDllLxeDjh9dsHFBGPhAa7IOKmx6hXBOciklNisnLgJEWRBZg5Cy1E35OTikQMEZECapA01
DNQlzP++A//T+wudrj98Bbnhwf4sgrdg015Sj4o26ZnKYRMY5CNCqC1/1lEkDKcpn/BcE04EFdEbfG0ltFgmFcpTW4OzCqWLBisHV3vUpxJx5nRDKPNrLLNqHEHmj4BQbFGfR1XEEP75wm1ZhEBVjEOpzihEb8Nk2OFFpzyRUkNe/BBVYVEcNo7idBzrjidTtjtLwCFifKywDnR1ZSKfKe+Ve3MKrWiVGAYxRk1TQIG1nLCfLHHw8MT5mmSIkYuyvf0Atl4D8fS+krkwKRZQ60tsoX1l0OMd4wBx8MJ8zxJplMrQBWegBjERccQALKpripFSCo/qC2RrFlSOp5xeXWNh6cDpnHUHjyWNl8NMqDOiUDwDnCOEQiYwtCeZYdhbM3tkdIC56W33yAC34KFqtoUG1qW80hp1eKsTJgwaEjsqBQ5RQKxbtJup8pOcq6pENaccDiu4oBUMtB0QEupyOmEYRhaJ1vw36+e/m3e9c8C+BeJ6K8AmCAY6N8BcENEQaPQH6Hz+H8N4HcAfEZEAcA1pJj07GDmvwvg7wLAzXDH5nk6jmbYmWyzqqmhpZ/GHYS+Yl2TKuGY11Oryv0Bb76761s6EbM9Hg7YTSNub67w9PCIr1+/xYc/+AG+fnOP2shH/9A1PMODmAW8F/6anBgRoYABJ6wC47Oa8EWuDF+raEGWglhkPksuGalklFJwc32DL+4flMy/aSV0Tr5GrzOqhqhEQWbwqUUnhtWGEFpvOgDdaF1cwummkw0jDssrZmiY5laj1doIh2FohQ2JpuX5iGCwipsouE+qKMVQHU70czHjKjCJbRL5fc4JTgcN2jluucAmImFRlhVkiNEq+caJ7R1awtpo56XHNt1r3GNAO2kcXJB7sK4ikCJRVAW8arjmLNlNSmgDDkHte6XDp27Gyygdzrs23RKMpiAmFCNSRaQ+qM3WdrGI2rtOfWtTMbULyklWQEGH1WlkZypUuVZwqnAuo3JFDKGpSYlThMIqlhlq+7VUePQ+VZSU8PVXr/Dxxx8LrOMYhKJ1BXmaYTNFQW4sww0OHAjcTJMyCSCTCS72ey0pSIec7TvrWEspYU19Squs2ayGXnBW5wjTOMnUBifXb0I13/X4rQaUmf82gL8NABqB/k1m/jeI6L8B8C9DKvH/FoD/Tt/y3+u//xf9/f/0Z+GfdhRNLbcFBitU9B5qqxhXlNTnZaekQ6esCtlO/vkCs79vp0gOg/TaMjPgPd68fSdzXOYZh/Oiqbr45XaYZ9bvaF9HguNalNterkUrr5GZY2ESMISYHaoVvXrq+ZM//1O8/PIrlJzxy1/+ClhXeK+REdn8ntJSXeN+AmiY7zDEJhor2J8sxpSSRqjUOKTb7hOuVYfN9RlH4J56Ge0lRmqjQyrzM+yLAVWOF0NkbaCkKlNNuUfxWguovT5fp2nitghoxRt7rkMcbI2qlmXvdrL3yb6u/bkVbiLPZiCFV2kUODHoUlcr7Wfb5gsZppef3WspuuWGBUs2ICIvIUpEuMXdUy7KYBD9geBlUKDIBbJmL7Jesumpet+ix6fDAVdXl50fC+NFuMYPtixAOLgS5Tsnw/+M9ULKsww6bdZR7E5nk8LTZm/ZWmrrm3vGoRUKhOjxyccf43A4YL/fw1Nsz0ofBaoKltgDdtuU3vYVdfrVxW5CLTrxFdLI0YIkiEH2Y8A8+mdZji6CDhMS6cBBwuvXb3BzfS336HtU4b+fCJ4c/x6koPTHEIzz7+nP/x6AD/TnfwPAv//tPo5aVGR/ZmDuq0oAACAASURBVI14DBfsdBl5R9GOlWEcAZBWIPsnWiQEaCT4DWoOEUnHQs4YxhHndYULEcM8I8QBLojKO/j/2Tvx5n/2bzB0oBn04UoHkmNuhsgW3bomFO18sfc753B7eysRaK1YU8L1zY0q0/MzipFdF2t7nOB0ktIu66r3jmHzsaUQtMI7r+vWoJCNUwAa1San1DBLkUELGIZB39c3VAih/Qno3
CSL3jYzkFpU9Q0Kk22rqgasDY4D2jOXFFYENDpXsosgW1MBN5hGziUOQ4cgNnBGw041inp4fNTvq+2au1F1LQq1JgX5e24dPWZUZMxvbdd1//CANYs+g1ZhhKMLACS4rl23OZecVci4mlaqV6EQKbCISLJI0YmupkzeTElaTpkl4zFHzQwwSVpc9L8GcbVMhPH49NiekfyqNqiLNatj5rY+WGG3qkFBqYpGKj1s3u3x8HiQUR5U5U/HcEGEcGRQACvtyqJRfU6u+dXmNIk8Xr9+I/eQtXNOD2uIEWSPFBV8rl8grwM8CjxVvLi7xnp+wjz9Y5jKycx/H8Df17//CYB/5je85gzgX/muJ9J6u1nUYZLqMdokyqZfU7tRdM5jWRfVcrT6HDdSPqkFMy/ccE9NU00SD+oIx3FEYW5je8GMaZ5xPK3iweQC+0lbMVPPnfSDiMSbVvWAUf8NAjxDF5csnIUZaypYVo8xMhwDvjJ+8fln+PDuBd68/grTPCMzcEoZ0yAtfbmYepWcSNXNYPPlYYsb1DYms2zadVlhEnUNMrHF39JrBrxUiKWQklslfhgGjZ6UfqP31DYbkTmvDhsIdrhq77RU+huNZXO0iGkbwW/9PCs30vfKvSggyXrppHQZaGapvRW5iFlbLJ9HszFGfHB7qy291H8PfV56H8WwikMPrsMe2MA8FkUbHDCOk2JsLOurKl5LrDqoSXQGagXphAPZEzZwr+q8LM3MHMDksSQ5v+gBkK5lw7BJs7bcO7u2OLW5e4euugWSc3377h1ubq4bHYhR+zOsYlTluZVW2DF2g2UTW/ra9c0VvvziC9zd3arYCADjeQupe1NS5VbjYGwifJKmmMoFV9fXKg1IqsOge7AtEwUtNAoHAyUzLNMiguLOcp93Fxc4HI74Psc/SgT6/+6x2Ug24MxUbFoKBoM2O5a0JXJblGURgf3bwnnAIsQeCdVSn0UQxiE1nO9iv9/w5/pD3lKs2rnBnLoYLs9iOD3L/LKgmKIn6UyJ3sNB2gXXlLCmpN1DgiF9cHeL3/u93wNzxS9+/nNEH5RRIN9kEoCGQ9rRUnFmxYa4RVWn02nDqeskd3vP9ll0PHLTtABqmJ+8hlsab9GTRYv2e5sR1JxXGyFsjrC3XfYUj9pzsNtcS20bB+jMgxYhh9AjDWYxSrCUMm6+r0vwGR+4F6+o3Vu7FcymtG+RPzUj0a/ZzlKKbFzl801B63xeGgxgkAsg8AIDTVBlu25Ncb+28+r3xJ7f8XhA1mGE4uRUpT+X1rmWVfjFnG2pli85dby6tpkQhgEffvgC3oeWmXyTQN+fmWURVuQqDW7pDlSu7eNPPpUJBUBblzaem8iq/KoeS6YQ5TQgIZ13JFJ7QaezPtzfCySmxakmRrLJjLYTZBs/dWNrjCs7DBHf53gvDKisD9nIKScUpWEwd5Vr2ZRWtXPPcJitAbUZ77ZYtn8vRTw8qdybLf6i1b6yqfpZkaENKqNeXW7n/Q2IwIWIYZwa6dyD2n+BnPJAVTOUgcDASA6OgdOacdYqrfMOn754gV/+4ucAEf7i7/+T2M8zAIkGffAK7EuK+s0iWSN6axXWIgZZSAFxHMEQKMBEfks1NXnVBths5pyzRhTKn2TgdJIBMjEGzPNus6mt4ELPntOyLPqs7Jx6Km3RUWpD6TZGsxr3EK2Y4Z1/9p+lcYw+HA9EXfFdDaAZNLsXPvTBZPbz4/HQIhqLRAnqaNucezQn8azoxX1z+uA7Pc3oW5uilK13oWm5lh2Be4HGqHi28c1hyPOQa7u6uoaN5HBe0nx5fh6lSrdctT/V55dSkVKWzGfJ0vOfCioT1jXjtCx4/eYNlpQg20TFazRyE+MuAjyi6u4aPFGqfH6by6TnmY1p0DjUum91f1qk3aa+CvClLbemxSqygSAptN3e3cIYHiEI3zXE0BT3CZCOLl0zNqjR6HCQnFH20bZo8R2O98KAAoK1EVHrT9+Ec42CQLopS
yk4n5cNftUNqz1IeZgdU82lPPPqbUqhvt6+ziJMQB7wqnQWi3rpz7jRLkTt8NEoRlN6hkyQrI1+JNVLD2AAIZJHSRUly5TDlBMCC3XkT/70T3B8esLd7a3giwQlUNdmrAQT6wWVqjSYbtxdS7mdsy4c9dKtX33jlQEVz+ggfI+YjCsoWGqPNLvXt0cAiCKTzUgySyhjmfvhtV2SnKXBvQjnvehMgnXYHAOPT4+CXZcMwzVDCIghts+VYpzc51K0Y0Xxzy2Rv5aqIh5ipAzfZVt7mj7bey0D6AwRFVpR4yYVaqnq5pw2AQDj8PS0idY7NlxK6cbVnOHmmZjBtN+3Kaf6XB4fnzrdj7YNJ/p3a3xgiyJFO9XaRYmgjkTuHIOwv7jE0+GIZVlxPJ1xXlasq4iGn9aMJVeUSjguK5Y1Y0kVmR3gApg8cgXYBbDzMueplIaVkvOqHeA0q8hos5lqbevErr8VfvSeOCJ1ph7jNIoQdBBtCSLhOIuuhG+Zja1LrzziPpXBdwf4PY73Q0xkc/rMNn2Te+cDdwdRq8z8nqZJiiUWAXzDsNnPtulqIxer9JhwIPv3duK269FGPzOgmT/qRF7g2XfnItML8+kkWGUtIiQBNINkaTEABCIU3nL91HhwwTyN+OHv/hi//tM/hQNjCpJuhhDgIPSrEKzFrkfiJlThlH9nFCQxngGlrM/SaLv2oviVLTbhToreY4wDckrwGz5nq4pDSNqds6v3SPE9q/CbRJs9i5RWRBUhac9Yozivyu/WDdaoU8pjDPvQioj2/qyYHAFg2vRg2zNiw+86Gd7GmUiPi8M0TQ377BFmF50xowyIQY/BHISmoJoJObLIitr33d7eNhjDIIaGEW8Mp0XQdt7S2RVgA/sqxNAvyyKK67tZjWNRhoAVYis8bQtlHQcFEbhkDTSkYFe4yl1Q/uzl1ZXI7fluiKQRQFoiE6rySaVo6VKS6NA5FB3PYWtpiJYiMx4eHzFqZmcBAIia8Hht+GnvCJQOw/rMoTdHRK61HQObLNW7XrhVOp+t38LGDCnK3f1+cnbvTQRqQLd5niZVppiIRTDkJI1MKT2LfiylYogR6m171B5Sq+BrKgLtRTesx1L4VIr8XPIMtH5xM6iEhh1hU5wRhaCCn/7+XwCMT0gmQ0cglv/0xyJDRkBUw7pWEbIohREC8Luffow//dmfINeMOMambpRyFp1HpyRsR8oJVANZufU/N88NSbtSWtXY1paaEp4bL0urTcHGBn8ZP3SbBluq27BHKD+P5L7JtdLG2OptARA1WjcjQhBj4VV9vhkSNqpThXPA/uIKpVYMQTQnLaqYphHBi+Av2vnIM6osjFKBK7g5Iuu08loA4Vzw9PjY8EOn4tpmNJm7s4shiuH1isGppJyJLG8pXcxASkLa59pbVllhJKuWS5eP/F6i9azXX1s3joieWFQv6agIaygkYc7ZDDSoKRcJqd4ExgPARmXT/3RdmDGLccCaJEJMNi5kI09ozz74iBAHeBcEJvMRFR65ECo8jkvG02nFmhnjtMOrr9+oqLdrxbkKKRWzBhWlaVlYUU7k9mqpKiMJ8ZQsGHbwokNq4YwV6yxylS43hvOClxNzm6DqN/zf73K8JwZUjsZFbAD+prABbDYuoDi+Lj5uRtQU2gFoS5tFWZ1PZgbTqrQtZVMD0BA1NpI0NELqqdHzCpL8XahXjGm3V+xIf02bZ63ftS1wOZKCUy0Vx2XRzSfn99FHH4GLCEwsywrvA7wLMCoJ9B4UjWy6gRNjYViTXXtKuYloMHeqj7VFGn4p6WtBo3+RYlsbqTdLu0otKNlUzZ2m0nKf7JkCzyNCESlRgF+NZIixvcbSKouY2sgK6gU+u/fWJ23XnnXu0VY4ozsHvd+6aS1CttSaCdjt961tV+ZtsXY6deqUpbwE6LgQtAdu58bc32dCHeu6tPO35/dN/NoyIWYG+QA4r105YkR9FCEY6LNisAjenE9d4k2Le3ZPD
AMGyTqzaJyoyxOaM2wFqCrYZdZGgG74dXy2FbmqZAXrKkI7q95Li+ytAQMQA5hSxYsXH+HVV29R4ESg2jvkymDFW8kHuXaw8KtJ5zLVzl3tmrTaAq57u9j6J9oEOlr0a8pfGmyh0/y+z/FepPCsWGRKK6zD5ZvXIxsHyGvvmulYJVBQmjEkdt2QMpR6Aag70vfUToliS86p/c7SmBgj6Ly2xWWpsr12G4VJC6cA2s57mcSpp2kPy37gdXM7xdZGIixLwjkEHE8iUuHI4+byCi9TBk0TxnnWtBYaefSeceecjKZgJWFDvlCGjnFrVMg5Iw4SOVVmQGk7ZqRsbIg5HukuKiiVEYehCQwb1chU5G2sst4lNQK5GWZLG72lwV42v23uYEIcGtHV2ivnW0MKSFq8LmeQzbqxzZVqk20zp7iFL6wxAKBG+pesgbWlT52E7zoH1vFCZHOOoGl67a2bRSNG7qlnW7egphxWUkGMA+7v73F7e/sM+unwUtGvkPtFZoA9obDXQXabtFYwFxAD4zS3SN97guMNfc82GdDOx/aOyPx1TVgz8KQeYr/f482bN7i5uRGHb8+Cn1/jN/frlhsrAwszzhrILClhmHf49ctXuLq6BLWsDoiKbzrnEBSzJyLIcEYRZyksUBuIVdleVNMcCd46T7JWLYtpOGrtgiY2ufZ7A6B4TwyopUhhGGUzRd8oJjbQDEAXgQW3NAXolVqn5PCi9B4QNU/ZjIrr+KULUrwAdb6ZYbCOpCocwrCho1DD16xIQQq8S0ouupzyObXjq2TRCASRgMS5DIBYUvnJB6x5Qa3AsmYQE2JwePn5r7Hf7TCNE54Oj1p1lhbHWmvTuDTBC7sGWxiGqTo1LLlURIiBkvvjGz6Zc1YqUX1WyTeBF9mMYbPJfMsSzN/FEEFEbTTDttgBNUgSlQCARTXPpdLMeHdMzxxFXws5ZwwxKF+za5lKCsrNsLdoVjE8EGTMNHNTmi+1qoC2TPKsmiIHH+BAm5ZRPHv+IDQaj0CsnXa1TfXBQIW2WHLFOE7NsFn30jZLsDVYKyMvj7i8vMFaisw7V6NGumZLLQINaRD8+PSE/W4H4W5SayzwzvdmlNosn3ynNmCAqEW/bW9C1tDt3R2C91iWBWywi32A61G0Oayt8bSs0ehQAvcABIfb2zs8HY6yr7VlmIPHWiqIRd6v1kWGQTggOgkcgpdilaxPdZi1gp1DCBGn87m18MpeMK42taygWjfL9ll9x+O9MKAtFa/SR61dckLc5qAGR6TJTOOzav9t1ciJFTQntseqBGo879rhtrnQ2v/MANhNLIVRkZvOpqX3RmGRooIiNsZVBSOXhBC99jFXsBoNsMiBEUl0DP0ctAcMRAfMPmBlRkkZVWfJLKuIWRxPR8Hc1LCQswmaknba9QhW2Y2gVHmtiaArHDXyuZfWvsosLX6AYn8kOoxKXK9VBWn18yQylSguZVNKclh1njvrgxU7rsYBltYJpkHOSVeKGklpGeTWl8xsz0iMhqXjBCnoWBVbCOm+RYbeOwSiJtIBGEvASNdKMtf/mWjNMw0G/Zxaax9TTRaVQqJDbDiZbRNWnUCgkTnJlcs+FcMUozQTRJAKnGRhRuQM7wPevH2H0yrO7PpyxuPphGmakInw1dv7lsk4J1HmGIPM9qkV836nO6rLvrHCMQCe30tQ51nT84Jrg34UdgI7PDw+Yp7nFnlaB5i+SZ63pfm1w28NJlNnLNV8HUGzeJxzwuP5DCiWHELQcckewzCKw6tFilwFKEsGIbcomMjwcsHDY5CAoSySbXlngRLpnoTAAjZZwRpHvsfxXhhQO4i6IERLM1gKQuu6KsUEAHqBweE5iZz7rsXWqWwxHqBjWwU2ewmw5m0iIYsPWqXTk3uGesr3d49q2oQ5FSxrQq7AqGGKVHihG6vDCcTA6ln4oCCM5AWwnyJqlahKhrqVZ9igkb8JQo/yTkdREKFSL1xYFdmMpXMOw9AjEanWZ
6uWdYpTrXg+TpekoFRKK+7Z+FlLscU49fbIzZ1v962lc7D+bPmZGH4GhQDWha5YSetWauMa5JMw7/ZodCIdGFZqV1yqqBoNWfGrNoNGm4LWVsBmGzXudrvWDWfPH7DiTk8HrbPFYBB7sK5Vv/Hse+zePD484PbugyZ0YSI59/f3OC/SUkze4ZAy5hDw9u07IAzgYdC1rc+ZCEtlrIcDLnczpnHE47t3uLy40EF1To173wfbPbGdAhCjFbA2xlMxROdkCNvbt29xe3vbMjaTUHyuEKaLuz1zHajnCCVLX/7T4SBrlQiVHFyMYEeChwaPQ8oYGDilhPV8xjxE7KYJ4D6i25yD3XbWICwlApE836fjQeEw7vQvR9otJo04rn7DWHyH470woJYOSTWWdBTr2lK7oko/VgByqpdoIyysQvwstwKeLVjLnS3l2qZMgpFqmkESIZnid8NfDCfcRCkW3VgEuCwrhnHE69dvUJUt4J3T6rue1TfKdqHIQ3AEuCARnLSTMnxl5LS24krSiqydT6sybvq2t0Ub2+Db0RogAuf+OhnPmzs1p0hULjQvpZXU3sliy8ze29Ju7i2VXknRptRj99+Kg8M4oKbacEWLpo1ITkBrbezPj1tl3jZ4k9kDNF2XrKEko6ltcD7fR1p4J45JnmltffxWULKhdCJxt2/n5LxHWpNyCAlSFDFAZpOicmeRtIzJeLVOYIi72ztZH9pVw1ogWtYkjQ6aFSQnUotl2gHTiOq88B29U5EaD04ZvjIWLgiVsbu6wuvXr3FzJWyFcRiQ0qKMgy2nVyab2trxqgRvykZFmyykMCf1ibvbGxwOB8yz4K0OkESMVNBbHX+bT6/PnlxVZkvB/eMDChHgB1AMcCGgDgHsHCh6nFn2w1oJPjMQPE6l4PjmDTwIl/td1wfewENgZV7oHuNaQcFjzQnDMKEUfiYCLvuhR7Lf53gvDCjrJrVqW6nC1wLQCj0t+vJ9BIIJ2hq3UzAyoBeLjGSv6Q45FBRsvbB8/3MeZqkFwVsxpWibncq+uZ7yO5IiU9HiRCmyqb94+RIAI0OKN9GZXqEOndOkthLgKqE6hyU4HErGCnRhEWZENUqFOqk9q5iy07TVjKBXqpVF5vyNqMoW3LaCygylfyh/sBSJmTUAFW9f22awyr5TfVSrfJLiYM8oSB01bM7RkcOyLK1hwirGVV9jsEy16Z/NCTrtYNKKay1woTu3UotoBmjUtawrYhzk3Gtp406MME+wNRF6lZo7jzal1LqQLPospSKqIzdHZvxSw9Jascybp+yFOTMuRSleT4+PuL29lXVKNn6joBbAxwGlZrgYkbwD5gF1CKDgUaP25ldGYUIYI5ArQq14eHrCxTDg5u5OR2irtgN0KGHtbAuv/FNrNzWHNcSIUguG6FugYPKDpWQMQaZqMkPZC9SmhrJGntykAmtX/CKH+2VFAQE+gn0AhgE8BmAcwMEhOwIFzQoqgZIH5YByXuFLALhqdmiGf3PfmXX9SIFTmI+ihnU6n5U2B4VeVKCoqnP9xhCeb3u8FwYUkIsSQmwWUrZWOZd1kUq4PWgTk6CehjF6imRV356yyPq3TdKECgxH30Sj7Vz0ZyF4MLxGDaVFbd57ZP08ECFo8cYH8WRpXWVyZ8M4RUXJk4OQhrQtrwKP+ieKE31GGB2j6qJEgyPEmUjhyGg42+4Mr6mpGRkz9j2Vj00g2K5zXRdcXV+LajocXHQdF2YV5HBOxB/UOA7jIHijFmAGreqva2rRHBGhpNTgFDEgQKHeoODQDVCPM7dG23iAhs+ZMIg4siFG6RRTZ7csS4uavGYQ5Gy0sd7ITahhM5ZijO2+bZWnghdh6hCEe9ixQ6gj0mhYKZFEZqCpDeDbFlW8k+q+OMUiqbBFfaUixgE+DmAQqiP4YUKdRtQxoO4j6uxRB4eqLagRDnkpmGrEDg4fTBd4+8UXePj6NXYhoK4rri4uRH2IXNMhkPORSrak1RKwzNMMc
oyiWqIMgGoGeS/7rlYEbYvkWjAOI4bowCzTQ9twR0jBVERq1DgXj0qEt09PMqo4DsA4oO4G8BThriakAJQAwUKJwGvBkAL8qoLh3oNWEdcmGacKo7C1BwCAaxcRr0Uw7nGIOJ1OGKexwXeOzIHk/39HoIBGobLihQheDHAX3LN3h3CLPC3laAubJZXrbYW+fXbHrf7h6FM+93naK3OZGJVlzgraaAqJwjyLsk7OGV6nA6IySlVNR/Yg3YwVgk1mAIvOPW84lvZyO/WKw+BxXhM4A7wf8OLTT/Hqs88aFcdI0bn22eeWcloVndT4WSGgY3XUOkdAIswQOWA5n4Qy1IoJZpgl4qqlgoJsEO+BnBLGcRQyNwAo5FGyYNXzNHXRD5ZqceXaEJbGk1RMkVWNh/T5Wu94wxs1VSbnJKrQqPfp6Qm7ecaynDHvdsoUYHBhDNOAlDJCiJ1+tFlrWdWZpLdbrrmjYGJUzHBzdc2JbmlJ0Ei9CVAbrQhW8YUWhnwbi2HpvTmVp6dH7OZZGzWA3W7G43lFJQAhAKMYzjwH5AuHMhBqkIi/MCOMAeupIMLjzemAUymoLuBcGDcXl1hzwjyMKCWpfTGcWrqOmIEwjKi14nA6wxmEqWF6DB4ODOfQHJH3HuyAz1++xA9+8ALeBfUi3J6hU2pVsL53J6LKp/MJLs5AcODowWNAmTzK3mMNQAkVNDhU8sDqQAsDS4BnB0cBxB4oZ4wxIKcCR6wq/+J0G1SikahEmlJoneYJ9/f32O8vlP/cM9vNw/9Ox/tjQKU01ugzBFnk1iki9AMjWPdFbJV1YJOK62uwWeRbQJ9hxraVP9qmNgGOrMaBoRxH7p9fWtVZzmk5nzAO46YbyCOEiPPpKNEyK+3qNwDVZGmsijK8+ORTnF6/Ao8Rx2XB7CcApOo4pTsO7sIY1vduPzcscgt91FqxLOf2OaZUMwx9LAOc6XdKgl4Vn6uavvog99VSXJuOWWvF+Sw466hUtEY615TOkddRIK7BH3a/rZqOhkPreeh0yIZrM1q7qU2rnOdJ20yzRnlesGTF0HNKipN17Npp77RhxpL2byrqupbgCMv5jHgpoz68zYNSH7wlzJMWuewzDG541gW3WWfWbnt7c6MYtEPhit3FBVZ/wrkSeBxQ54iyc1ivnBjQCagKmpdKQPXw84icPA7rGX6aMBQA5zNO64JZszmJfnv0TDrig7WlsWRpx6ylNqqZcw7LImwTw50lm5OI8PrmA3z56jU+enHXom0itLlfvNmvzjsk1rZJR+DoUaNHmTzyhcc6ZaSJUCaCCwxHFVgr6tkhOwA1gJcFu4sZY3XY7WfESshpUYiCtQXU8FjZb6VUIeRrNnO5n3B4OuLictdw9H+U470xoFucyFSYum4hYDE2UVevb5Gn4oMW8Zj3tPzX7BZ1myqfaAtcF0wjHVdJ30suzTPZBleT3HmqEI97Pp9h7aY2vnWcd4je4+nhLdgu4ZkN7XxWORcxzh/9+C+A6wG7EODrulm8GywTvXPLjID9fNuh1N/rRISEsww0y1n6kWVHtAJOrV3A1zaKRedJKVXeS4FMKCulGYWsPfPSC2733obcuWY0t+hog1DUoRl30wov3QjVphzvoxScJPWuDW8UeEecRoxxM2KXsK2eAyZ5J1DKMEifPznhrw7DKPeoZMy7uUMMFqiQFa3kGXY+bM92DHdvRsSi8E3G49UR3T884FqxUBcDjq9P8BeX4OBQo0OdPMoI5BkoE6NEgEjwO1eExbEyYxgc4KXAFOMAShVwwigZQ+/1bkUrdaRFe/k9EWIYG95t7AWAFROmNoiwlAIH4OrqajN/C+0eO7KcQsFIAF4H1VmlnaNDHghrZKSRkCZGmSoo1sZtJSaEXBFWwEWHtApVKwSHcpKo2hoGCKRMl43yExjeSaFYevMDwvUlvnr9Bi9efIhi/NDfENx8m+M9MaCy8ko1Ibhu9DpdYVs0cu1tZjzt/1jFIbYRZo9Cz
eD2rzV1neBDS4VNNEIENiy9t93TQ37BiiQSHXwENb1OOfwwAqi4u73F23fvWgDQUiT0j3VgoGYcHt7i4atX+OjDS7gw41ir4rrUsDkTWTEDWqt1pfR71qJzxaBiDFjXBeM4oNaCIQ5IOT/TWrVquwk+V2YdY9wdWS4ZpWgEBa02f8NoheCbcnrwXvmjIpQtmprGixRBjaKbUriKrLg1YxvZNfK3A3ISHQCpkNfWLQTqwiKevTZWULvv3aH0rh9zJkGnVYYQMA4DDsdj61o6nU8Yx1FHqQBWQLJ7DXATbWHuGgydNdLXTYMFuM9kuru7U7UiZSV4D/ZODOhAqJFQJ0KeKsoM8KCC04WwrgVurUqbSqC8YswJeVnBpyP8fsYUB5CJLDJa1GWD1GywYi0F8Lmty1pYRaMr4jTKNY1Dc0bSwsr46quv8PEnH8v8pRaVy32GagmYNsTdhx/g9dOC6ggcFJKIQBoZeWSUuYgTgES0a8lAJqRDgXMFkTMur++A8xnkpbvLOYKDB5E1vIhqmXMeftDOxgAkLaS54PHhRz/A49MTLi/2qKXq+O/vfrw3vfA2VRFAM2SAjH+1BSmk+a5g8yy0VDNrxs1I0kDPXCyNwsaQgYR8m0tuLXusqXipjBgDpLzDZj9lHVbAOTmXUitSyVjXkyjc1CICrWqrHRdc7Sb4ZjMVZiACuQHOBTiqiJEwDDI2wxeHDwueUwAAIABJREFU0+FB8cEqA8kqQJURHADIgDHrZbfb0FPRFoIDQNNCNVqRSNEJXYpg+pyqVJMLuFREHzTN42Zo/Sba7DGG3DMz9CnJmAtjEMht4/Z7eb+0kGYlj1sRx+bbiyPz7XrIAc4rVkkAWBa9iH6UJr83xNigAAfBIEstUtG2rpwmFtwHvBXVRgWgQh1S1IyhU+qsX1qUnKpGcfI5pgT0rPMKQJsMSq4Vi0S8QpwKMVCScEGjwkDGFIATsZDqCEXkvKAjC0CBwa6iICPVgqWsyLWAncAcUoStSGsCWDqUUi7yGgKY0PRgrRAJxQ0Np80lI+e0GbGTAbBMJYV+T2V89MMf4md//DPVH9XIeFNvYGa4LEHI9dUeRAXwEVUHK1bHYAdwgCmPy3U44WlXEAoITBXXt3tM06TOUSbJsgW5JHq3uVYwBRzXgj/+xef4o1+8xC+/eIeXrx/w1f0Tvr4/4M05Y3UTvnzzhMzOdvd3Pt6TCNQiTYkcg2FiapyMRF5ZpgoagdrECtpn9A97hsEAaJGiRDhbtXFGRUVOWav91hWlrzEjrP/XbLadMwmeljWlKSyalenpCfP+AlwzxkFec7H3bdKmpXLRV+x3I378T/wUzBWfv/wCKyc8LGekdcWHlxeo6OT5SiTjYskKGq6Jx1buVeRGtNYU2Km8V9/cEgM7bQWVaN7YCdK8EAib+6zXrkWhBp9AKs5ScRVjHIJvRmccR+GhOpn1bQB/jEGxUNe6ZEIIqLk0HmnvKdMaBUmmYNdVCmNdz9jv53Y/rYXUBFO89yrTRtql5FsWY1FuKb3LqhahyfQppAWXV1eNB2sCx4a1e7IiXnfUJme3hSB8iJsoXjm06On86bzgchwad9YiXfnSCqrU/klVLCBXAMVSK6USFZ18qd8tfOaKy8tLGH4vrA2HCuVUV8ksiJxmE7Xh+4wKFG1Vto6tzZ6qqCjLik8//R0sy4J5msUoV4IxZZz3yMrkePjya0xwONQC4tCeL1WWKSRMQOU2J4lYKv4eMr/ok8sP8PjqDQbuzSINYyFCYeDt4xGvvvoFaBgQpx3mixkuRMRRaExcGffnM1AZrlYcX7/B6L5fLPneGFArEJQi2pLSvSOLAq0bxcZ4+I1mIFqhwnyIda+Y9JcYui0uZRFvNzRCyJaIbBzHtikAqVYv62acA7bopXxfIBshq1aGgNPhCbtxaLQK7wieHCi6ZnjnMcA7QiDRZry7uZSpmUSgecbT6YDL3YTT+QxUIZgflzPWdcXF5QUGnW0uN
tE1OpdFcTYjiMEIIbafA11gJPjQWjCJemHFsGGLxgwzI9cjN2vds6KIGVWgaxT44BHjIGpBFuko5tqEbtVgeS/GPqXUiN/PWgb18D4gRofD8QmilG/3nvT8pEda+J+ueUBT3hE9BMHqSi3gpO2hCovYHHjrZmqXxTpyBmiK91a8Y4hhImiVv9F6oMWsfg3yWvmdDxE3+z3ePtxj3l9innc41gqUCqoVrjr4TPAJYIUqmABaASTAZQISA5nFoFZJkbwT3VAPyTi2RH4Kcres401iil6UlfiB2qYyLYWmmAYGUNSwSkfb8XhWuEicxZapADB8JcTCuBhnHHIFFcAVgDLBFcAlBq8E9hKZu0RwmeEy42qacBf3oMLYTTPK8dQdO6xYSVhywas396BhRpgnhHFCjQE1ONDoMYyDwBJph+PjE5AYVCtOy/lbGqrnx7cyoET0cwCPkNm+mZn/gIjuAPzXAH4M4OcA/lVmfkuyYv4OgL8C4Ajg32bm/+23f4mA1jYGQdLCTZXdig3QIgmRTvKjNgLVRDvawRYlbqKD1nXUixe2UZxziGNs0ZNXYed5nnFeH/WcOgYbjGKl74dn1CwVQbAJUyQAY/PqwQX839S9y4t1aZbe93sve+9ziYjvkpmVlVndVeo2wp4JhHHLYIyMwCBPNDEaWhaCnsjCQzX+CzSVJ8I9MRLYGCMQ0kAIC4GHEsL4grHVraq+ZWVdvvzyu0Wcy97vzYO11rt3ZF06M+U26V1ERX4nTsQ5Z+93r3et53nWs0IQaCIGESSn5UpKmWnaczw+4XC6shRxb5rGiSE4ZhqXy4VTEdZ0GCcu14XzZdHeccfTp082RNg6OK01kYPN87WL3mMMnZSxzEzgYde7kQR39j0b9GpIQXH9+lgWYH3FtvnIupErucWZh2Fgnq89eBZlxoNexKrYmcnXbG1IguG0e0Z0lA3Hfn/Q8y9wSloWJSDWgWw2ZNDWkGWQMQTSsnTzmFoyIumXgi6q9wCtcrlcpftGe9a3RKJ5HwBkazXEMjnPsuhwO7Sc9UHaNV0FFyEM/MqHH/Hsgw95/eYVD/MFXyotF+I1cXP3lPsl83CpUBotKTyQICRHeCj4c2NIjZArJIGRPvzgW7ScuktXw3F+ODGMMrTRGafgdea79zqTywQZ6/hpez6K9doIYufFtBsnLvZV15t61Sj01Iit8eknnxB8pLqF9569z+ucyNdGjI16QtzDiqMGybJ9huMSOLTADQPtkhjGibwkpkHanc9q2ed9IDfHH/zRp6IxjSLOr/sJ9iMleso+UvYDgUCrgTDeku5nfK5Ew9e+4vFVMtD/qLX2cvPv3wL+WWvtbznnfkv//TeBvwj8af36DeDv6Pdfekgfeeru1xY0e2dB27hpY6W1Blm/Avobfmi7gdqrAKs8pvfz1qpmq3RCQQiAomSS2V6tmE5z+twhatkvAeTu9pb7+7dadlaGaVTc1SQ6CpB7xxAD+MBSGi8+e8WrN28p1fP+B+/x9vVLkZGEgY++8yvcn19zmWVcbZ6vBA0QKQm21zM5zRRsfrvZxIkPpedwGCml6E2Nfn6DNVzPNotpHp1mb57u+r0+V050bZWUWsc5nbOMx/dytOhMoZxTt8RLm26j1DdL1mwPFdUriWdse1k2jRTOGHUNxDFiM51ANw59PSv9DZ5Z0tJBApEdKTappbVhtt573bSEBBvHsZNQKykZ1iCktXbRUj0oWWPr2TmdhOmguciwO3K+zAxF+sT300ROmSUXmB3xmhlb4xCDdB8l0ev6AmFphGsjXht+LrhcOex2TOyIwdOIKuURsmy/3/c1UdqGbFScU06d6/fW+n29qdqGRshJXbKaYNTRRx4eTtzc3Sp+LuN0XvzBH8EQKE71wKVwHHek5Uq7Omp0uNqoBaqH6AOhOnYXGJYCaaHWgdP5LWFO1FaJzrOfdiQv45o/f/GK6jx4cNGTY6AMjraPtNHDwZNGGadc5iJBukT8XNmN/
x+MNf7C8ZeAP6///XeRccd/Ux//e03O+j93zj11zn3UWvvxL/tjKakIXNlYK32srVGwtra56YWpBXFPcppm2oU1xyXD+gyz6/ZjSOAIIajwvUFbh5CtYlwNpLXJhXGaXdiCC14zFSBGvIfdfuJ0mWWAnLZbApIigwB6RAiRz169YtodOF0TSwmksvBwmZmz6EJDqTycT5wvFxpRg0wlZ8fhuKfWypO7A3c3Mn97N406hsNG8soNYMFqnlccF+gC8daUo9ANSwwY1FtSWCvGcdcxQ5tR5ePaFhpCZJomLper4n6BGC0jbSKE9o5p2ulm4nt3mPz9aTWw7lhtU9/NzetsOq4MYzXa0DweTYFQ0oKnUbKU0Hbt5e+sxsXS5QYhOJYlMQ4jBM/1chWIRk1WbPSJbfLWw28mv7i1cSD2rFf70U1bXKW8zkjXVs0zr17/lOv1AiFQnCPs9gzBU1PEnTJ+aUwl4CPU0IjjgC+wPFy4cwMRzxA8xVXGlnh+d4PPhWv3PXWKeXpev37F3d0TOcfQ1+fWdX9dO0ATmMEkc3jdEHsyQycPnXMcbm64v39gdzgw4Pj8k0/AB5w1uOSF+vaB8PQZOxzxUpjcgKsRWqA6WOYruzjiz5UhFdqcuH16JD+cefPZCwbnGYdIjAP7aSTuBl69/QNaiLg4UqeJehiph0i+gbKDtoc2FLyruLERd57oA+Nx4m7Y/bLw9AuPLxtAG/A/OYlY/01r7beBDzdB8SfAh/rf3wE+2fzuD/WxRwHUOfebwG8CTG6vvbVytYY4PNJ6Ar3UNOwthJVpfqSLNGJHfqmXIPKaDUekC+k1q5K+eyVVDD/Vst/+/ez5Ex4ermJQ0RzeS7BOy8Ki+shpmLgsC/vdyG6aePv2nbC/OLxrStI0qg/cXxMvfvhTxvGAP78jxMrhOLLfPwPn2B0O7KY9tWTevXvH3d0TllRYUmLwI63APIuIeF4yn79+R85FZvpUCWbmmOQUU9zrZE/LMuTGl17lMDhyyr15wbwJGnrDa2VgCgnLbm0OeIyDZraS7S3LwvF47AHSBPE2O1yUCK3fpN7r8Dfo0ihzGl/f62q5BkI6pSUpSabuU4g42qvBM5pBplzwYXX4agiG6eOqPAhBhsGZd2bJNi9HyJjz+czNzW1fT4MZPhth6aUSSMsCTvBHe7/G8HsvxGhj3RBevnjBEKQJYU6JMXpcLYRxJNVGev2Ow+2RUiHsRvzgBTesDu8O5Evm+vZEeThzjIEPnj5hvlxwRWZaGaFk7/u95+8xLwshRsWAVxs7O8dbU5rOKejnzLnIPCfTT7dKq67DP63BOE20JfHy5UvaksGbEXqFlKj1gRBHDscbrkul5SsxT9RzofrG0UVayril4FJllwo//unvsiyFkAuokUyIgd1+T3FOupf8QBsibQiUXSQfPMuhkQ+Nsiu0UVqeY/HURVQOywx+mr5cJPzC8WUD6H/QWvvUOfct4J865/7V9oetteYsHfyShwbh3wa4DU/bVjvnlGyRJ1pusWE/2eKY9MegrXioPGlTwkvQ7ISTBsioJZ+GYraly1ZPKj3fI8uStMzcjLOV32LJC8UXbtyA945nz59yeTh1zNRK1Ms18XC9EscR6pnD8cDNzR3ej/z4R5+LmYLzgEABOcsoDxtH4PBUp3rHGAlxFJjDAy5QcqK1olmefDLBeGVQm+n3QpDhfCklUpIbSrLX1K+FkSRWsgqGOauYOvdyOw4STEsVtyPrLmrNSASBF4qK84sSbnbTOkfH0kTCtGpcQfrHbaKqsf72u9u+81or4zTJ1XTra8cQdEyHTi0NAacBsNTC2I1JJJj6tnYSOWTjMJ9SoBNt63SENds3P1ZrP94O+Us5iQTHS2CruXB7c6QsswZvce+iVlzJlFlY7F0p5GslEvCprBlvLri5sTycee+45/awJ89Xnfcj9nFm7m3vtbUm19h7Snt8Xz36HF6NtxWasZ+J+H5Ve/SpCGa+oufr+
vot+XJl9EGKr9agOhyFwXvmh7eM4x4fA9cZ2nImTIHghJH3LvC973wXqudH//v/ygCUZRGeoTZZQ96RcqUqfkvwoqEdpHwvgyONUMZG2UMdKgWbST/ABGNtlPiVwte6Dr7Mk1prn+r3F8A/AP494KfOuY/0pH4EvNCnfwr86ubXf0Uf+4XHVjf3+IXpc3M6qdQvkeu/u849oo+wMJTGdIUmgzJn9T6DfjMPRuZnF50tJNhNaTJD+3y6iBt3a7RWejko71Pa3WpLjK2xnytjbpQ8M4yxZxrSTdOYcxatZCnc3N4yTTe8epv47PWZEnXQXRM6Q/6y6dSsZ9ytguVaqc3hwqgD5VYsU68ZtVadNikti+uuIucp56Qk0WoiMgyD7PA+cDweFQbIvVxd/QZEeH+5XPA+dNvBooJ5ew82u7xnk36V/3gvmKBtipLFiF4yxsA0jr3bxStG2zaVx+V8UUcdOfoM+GImyo9vDnOtMvchsyg0GZO0t7ZuYmOf83g4dHign8HNv60F2ft1XK7hjV1Kpq+di1ynh4d7HVWh0rxa8Q7GEIWwyZmQE+10ZrzO7C5XpktmPM3E+wvxPBPfvOHDceTXvvU+l88/4727W7yu56YTN3U19ARlHEdevXr1aCNym81r+7mtQpBrZR1YKxYqBuMyU97McCKedL5I8AQhfBVCoxZ8TuzSDG8/J57PHPye5+OR/bWwvyS+e3fHXanc/+RzhhpxSyY4B7UI2dcET08pcTlfuD8/QNfKemrwtAh5gDoiIv2pUqdCnjJ5qCxDoQyNFCG1PyEhvXPu6Jy7tf8G/mPg/wT+EfBX9Gl/BfiH+t//CPjPnBx/Dnj7x+Gf/WjavmndRlrygWWHa0ClZ1f0TMuCqWQxsntmM5h4lJmCEAdqyVYbNuSttkbBU5AFcX24ks5LD2pGGDQquYpxr0ELoTXG68wHfkAQFd2hm00N9aRcVWITaXjenjKfvz1TQCzdKiLb72xKw9nCdzrfGzOrcDTMMgxac8yp0FBCJQyAtLRJf7DvLjRDjN13NegNX0rWURljz7icg/v7B1pTM4miG5OXVte0pI0TVBYWXNYw0krnVD7l1LfUblivj2mp3FzvChKvydA3g1yMqLBWWikrRZJUicPAokHTMqctcy8OUQIBidWZCt/9inPn0pRNl0GCrYm/qzD4trYKb968ls035V6hZPWYjB1Ll0BsgnvbpJcs1m+4QCqmNhAjmmEQdcaw38nm2Aq+ibaYUvDLDNcLYV5wyxVfEm5JjDnz0XtPqcuFP/rB71OSNEHc3exlw5YbRSeSyvoRu8bAtz54v2+wgvt7+iRTI5JYW1HtHNhNZ3h4M3OVKptsqvDmxU+IfoXODHh3aBbaHJ7KPj3g37xgCp65jnz47T/F+0+esQem3Fhev6bM7yhL5fj0jo+/+2s8++g7XN1AiTuKG8is94yzgACiKHBoIlIprlBdpVJIbSHVRGqZ3DJ/knZ2HwL/QINVBP771to/cc79S+B/dM79NeAPgb+sz//HiITp+4iM6a/+sa+g10urBWXV5UY1QmlbUoIGz0bPnAwj3WZewkw7JZnoj+NEcC039hYCkDfji3QWlVppYYUDhu7+VPGl0ou3qvZqpbGbdrzOM+9aUd9GnXxZV9H2o8+wPSzKK5mDnYbNJiIwgNcTZYSI/JoPgZwy0xBXwgq6rMZmYkv2piRICHruWp81LyWxp7RELSKlCjFwuVz679jQt6otiF1TiwWtQWVK8nnHcSR7GTl8Op20p3611WtV5hDtpomUFsZh7IbN3kugXViY1HrO5jR579nvDzKQUPNz66YxNjnpjHIhU4RlTzl1FYYEXMXOw6oyCCqzsqw7DgNH57smElg1wEqsGTzgtZe+Ndvgfde1FtUwl9rY7Q+c799x2I8E2+Rk95GsNWRyqfiSRVSeK9NuL51X0VOuF370Rz+U9ek8x7sjaVkYomRr4ZE2eDWWbk0meT48PPDkyVNZ9
yHQSlvNVbxXs2TXIbR1Oqf+vDUR73tPCw5KpV2S+JOqR4DA3xqY0YBaG6NzBFfZhUB+9YLhg495eHPPYT9Rm2SZu/M73v3u77CkmR/84I/kXnDrPeG9ZxonkWuBznfSSq02/RLFQi2qF22OUDw+gV8qN2HP+DXnwv+xAbS19nvAn/k5j38O/IWf83gD/vrXeTPGxj4ScFtZqZnANviYvlCGc1kZ1N9HD6yGbYFewgbjYC5EawDS35S/UWVyppWBDkerkm1JyS7uOZZtxSidFg81sRQFtItlkGjwtXbV9f08fuUNduvc5n2pvAd5Xbsp+4fVv1VKkQVcEZiBlVH1eJXyaKfNGGWkSEoMcdDS3oNCGI5NaVoLvgUOhyM5JyndWul6T1RO1JpqZq9XtZ7zFuq5zrNggEkyXB8C83zV+UnrzZ305s1lxTmF3RW1Q61NyRlZE6UWZe4Fe0xp6XPaV8yyUXIljmaU4fpaMkKx1cYQRRcatC/eeenMEXcuyQbFjd4r6WXjguuqX3Z0dto8CFYsV2qHXCqlNOI4scwzh9unUvO4iq8i89lN4qNQq6fULJtFnNjtD+IFgCOnBWolDiPL9UJqhev5zN2TO/Hu9J6lVFrzK6asGTKKGT9//p5OLPBdzTAMsX9e2xBpJkWrjwxmGqpmcNJuOzjPm5efM7C6ltkKtc4rCeCOVpxIlsrMU0bS9S2zm7gkT95Fbp59xI9+/JLBRS5USnO4WsRSL6q8UW361vDcaLlArpAbIYFf5PlthlpElueyxy8QM2K0sny9Ev4b04kkox18B9utPDKA2tzqt/6fho0KWN96AN6ytrbbQ183vb3MgpmVrxbYqkOkO7lyGEZcg4x6ZZYipYENdNNMMLdGHTyOwAXwteruDXlZ8Pt9D3oheJK9ry2m1iOmbiDo59SfSTYB1ZkAHc1qK1Gz25IKMR7JSdoRcXSjlJREcRAUl3Nq67bouIY14/MdiwTJbGuTvmoT6g+jElcb7NQHx/U6Y4P+rA/cSmYj3q7zlUEzTIEGIujwv9FuXiceo+bbKb6c6gqVTKjvGIeReZ5Zlpnd3R0kdYUaRxGKF2GMm183XiPDchYj3TjI4MLVTEUUAWa4K+Wt6wF+OZ8ZxrGbpAC9o0nUBfKYeVQW7ZKKMXBNa794mq+STeKozRODo7WCp7I7TAzR4fC4VpnnC0uNLNeZhsAD92/ecJgmYgCiNAPsD0dev3lL09HQZj4jMI74nFoAtM9pZKhdN1sbpic2TbRsiNshjIgDlpZBvjZ+8sknPJlGOS+asdqsMcPCQbDQBUks/BBpFHbvXjCOT/jRuwv3uwPFT3yeGiFfSK7gK2r/6ChpYTcNRB8Fxgni/L+UKsRayoRroEVHHDyuOlx1lCDi/lgbY/IckiPd37PUP1kW/k/0cKAjX1lHZugiHIaRUnJnWH1YPRglS/VrV8QGzLexFWs3jpPSoTMo9t/at9zxVC1LQqS6ytykHzgotNC89HAHRSCrYpQ4x9AE2yleypTaLAhtiZsvfvgtISHfBT8Tp3vJBCRYfvTtDzmfzrx+fd/xOwvCrSGejmazZ62PbYUtJANMnRwR3C52kfkWXrBZ7VtvUZtNJWSJBKGinSl2TcTfD5oPffMyYb6V0zLWVwyIXYNxGJhtA9ANdFUDrD6WltkNMXC9zsg8o0FhgYdu0ZdK1mvqOx4sc33SOvpZs6FhHMhL6htyjOv7zkUMmc1o2nC/N2/e8O2PPhJyxTmKk4zZ8NQACgN4Pf/0rh+FCvFetJmtVvCDTEZ1jdoC0xDwMTKOAe/ANTEODtVxfXch+sb5OvP2zRv23/qA6B3DbqLWxjwn/DSSqq7PZMY8rW9W3bIwrGz75Xxht9/ptdfs0lmTReuQyTgOIgnzW7ik4pvn4dUbjqM4P5V+dzW5P3uWuFZe3jl89eS58nY54Slc05lLytDuuJRILDNLXhhcEDK1t203Ru9xyDkNteEH2agvucCc8VMkX
BxDBFc9vkKM8pm/tdvxa9/5Hi+//wnHITCdv54v6DcjgDojjhDdnvPiEuR1R/aSCfgogKQEzrBmSH41y61l7YwY/NAHrhn1Ukvp7uuC41geqoGsZ7erKS6sWsRuEqG4bTDw2rJcjJlzUKSfP9faM7CGCKstHa5Uwd9a4fa4Y797wucvP2cYBp49e0JKC+/uH0gp8emnPxYwIeiNqS8jL9zUOd+RkrReSreLfB+GQTpFHpWUVk617oOZUlZIYuj96ZZlgnh7Ou9YchLzYpM4qc4yRs88r+WQBOCkG1llmibmZZZArG5K1m1kzHWtlcVMgBGplsEEOcnvDMPQR3EI8XSQQK/QQamCzUX9DOMwQE7iEB8FK4a18rFqYskiei9pwXrsd9MEbR0B8tG3P5KyNzw2crHsXSLHWv30DQbIxQb8efHYqpVWZxpyzmur+DCRqmPAsQuyQcZBXNlfLq9IJROc5/bmyKu3D6T5wtOnTzne3lGR6lU2cFkgrTZoOimhEw1Oh+7JdTkcjyxpIfiNGYpONLXgL5mrqEmCX522QgsUMqElfK1kt3WKkJZktyFp5Z6XTaYVbUZRid44BfbDyMvTiWuV6iv4SGlN/lIz8lSx66btok6CaAjSvDIviXIKuCJwQRwqZeeJY2A/DBxz4O3vfsrt4hhzpTzMXyt2fSMCKGgGqvZXXj0IzVzXcKWt0rSxit0lFmnr4jDoTl96liokhOIvKAatQcg7w7M2Inq/6ja3Au6toYLpI2ENQnb07G4I0v4YNuMcan30XBuHDI43b+45DWdSrix55nJ9IX3rw0hzofd0m5bc+1VBQGs4P1BrIQ6xZ56i8UyM2jsM67x40TKahjJqpu8Vv3QbY2Zlr5su2soGOqH30ptofhwkEMdB4QENGLjK5XLW7idhp23igA+BUVsl17ZLyS4sm14UlghB3J5wYvLsnGPaTWK+oWujd804bd2z60ftn9kMl7vu0XmaF32hVQbOea5XwW9308S8LLRWWHJiF/cii1OYQGbYS5lr68LWjq0LcNjEyqAMsZW28/XM8eYocNK4IxdkpEdu8julMV8uMvFgjNzdHFly4afLFYaBrCRWn4jpVmcnsewTHNZG4TRE9ROUZDyfztzc3GhgEoWDV8zduqtqXTu4TJ2Qcya2JvpU5DP5zRKXc9HvWmAlQ5PisguVGEWlck6FJRVQPe0XD/srpaqdndnm1YYrFe8Kh3FgqYU0y3lm8LQ6MJTGcE5877sf8eKTT/XcRsLPeZ0vc3wjAmhrZrYrfeg0sUVesaqk2JKygRth+jb7tAUjjwdstEQnUiStZCuDMkG02ZnJ89aAZ0ys3cQm9bAgVDc/g9VdyPDacdxplK9fCJ4qrXIgpRDIPCiHj4PcRMqYLgZwuw1r3f+O64sdJ2RWo4l2Ukt6M5NY21vp2Z5hxxZMfZDMw0pcUzY0xdNqqZ1ZB6DQ3Y9CkMmZ13nR9z334XdmOBx0AmjOhePxoFIhOS/X60yIIrZf3baaBmFhW0stzMuMOUFNu4mSxSD6spzUZal20k00inK+w0aM75xjv9/3TcYyxVLlvUbVoJYq6gXXYQj6FFQbFBd1QmXw6/Uxsb/pLgXGAFi7e3Cilx1ClP8uC3kJhDjycG6UvPDJpw94Cv+98uO3AAAgAElEQVTOr3+PXBIpFfY65toHxxQGvvOr3+t4tMMqMtPhbpo9HDgXNPuVz2LX0lXH8eamByWcJxBxGFkr11nmqctm6rTbq5XMm59+xsG4ik4/2f1twXzNSnOpxoT2Fu0lV865csmF6lfo5gvRAlO1Ppyv3N0eV+7Da0aVM6hEbADqkqF4cX+6VlwrDO8S+wVcLuSa+7X7qsc3IoCClEdBO2VMtBtUF2jtf+K63foHdlYWG0ZWxfxByjQdGgV9Ecvu6VV03TpTu9tpD7azvnn5vmaekqWa2N7bhUJ2b5PvbBeNc5blaqletIVS349Ii1yHR9v2q9qY2daDvf1NN
Bi47cs5JUC0+0hY59InnJYiHRuhrVKvrSysVihV3Om9DwzDIGy2l2mMsL0Wa0NDKQU/eHLK2jvvuKaZ6TCq25B1CXnNgHL3lDR2HXiUaZq7fC6ZGKRVr+QCXjJQIxajui8ty4LDMc+zwZSgnqSycdSu0ijLop6yMjRQ5FCu47wGAdVaaV4Hy3nJ1uIYGIeBVq+klHj27CnzosYoIUjWpRu7kJTWd+869iwNEVoBgZTiIVIbpPki8p3SaBSe3N4xDnd49wG7MTKnEy5Enr7/Afl6Ftbf+Z7N0xqlqdVj3+DWpELWVcM5qSaMIBM5m8gFvXc8PJy41XZV71YdqDlyGXlprlQ4yJcr+2GkVjGGDg0tuTdrfRsLddQGynU0J1BKLoXUvARPEHzzF5AHqo7m4eHEsye3kl0rtmwdVC0lGa9cAi4GXG44l8E1PvnXPyB6TyuZ/eFIyz8vWP/xx9dzEf0TOARvkl0x57JKRJrZ9If+PN+He9WVeNFFY7ZbduFEi61spjOTWIEJsg7SSulxyWaHMOyW8VksXDswjI20McSW5RkzLgvXoAYNj15Mhm0Ko7HU1hqHvoaG8EcL74sbctMF41rl7rjnww+e8/G3v8Wo3qY4mJeFcZx6l1VrUvZfr1dxK0c3HucZx2nThaKBU1sca6nc3txSS+3ByzYg5xwMDYbG7jiBb4yTI4w2OwlKtplDmXEc8cFrG63NElK4YUnkJTFGybKy+gyUUnpvutPzUlSs7pxUEeLHayx61SxsZZ5HzdxwYiBslcQwDNzc3FBqYdrtGHcTeIGDYgy4ELnOM6fLlVQKzsP5fNY5QTbmQ66lEJexNyykbrcGKVX1RVgtFFuRto3BO8qy0FwkxIm8ZNIiG0AulT/80WcsLfDx9/4U4zgozBKZ8yp0F5NlM8iJnfgyP4hhGJQIo3fjCR5e+sb4/NlzHh4e1CRcvBFSyWoVqVLDBq0UiAOVgaDKAEsw6wagbzgNpiZmb1SdwlqcV/tCJXGdmV+vWuWfEynszhBMucGLN+/EgCUIeBJbZXCNgUxo0kvPfMFdLtTLhTzPPNzf8+u//m9xvLmVKbfz18NAv0EBdJXP2Hfz6LRd1NjlrVh+/V2ZvWP2YRZcDSe1XdeE2esQtjXDWgkqpwus9izvizhnF7VvH2vrZzH81vSK28e35sCWkXY9ou7Mj3LStn1VW1z2c8kgZMyBZHClyDx5CyL2eZLOaa8Kjdi5QIN8Vrd8G9hmqbGd89PpJBWA9iDDunG5AaqvXNKFa77gBs/pdNaxF6aXlHM2z3OHVsZhtYaTja+qo3ntcrOUknRIjSPLLOTOsszknPVL5FWX80UybC+SehPz4xwpLaSU9BrJe9/tdn0zELhoVHwU/Wwr7ivvE6ZpojVYlsSbN29Y2zZX3LNns7aBmsb2C+u9VUeIgdevP5dAlaQpIS0Ll+tVNvogldZHH30kN3qpvbU3qcTLEg3nVryYZuO/10aUTvoEmSvvvDhmSTYqX/Myc3t3pxaAg06Yjd0bF5ySe1J651T6eWqbe8XrtbT7ojYlW5ut2EZoldAa1AKtkOtapX2VwznH/f0D5+sVvMdH8Z097CYOg2ci41MSVULOpPMVnwovf/QjpiCmJa38/5qFtxJiC35b5ul7BmekRddNYlq1lb0FenB0/blNL6KIhNexF6IpHIZ1rlDVTqMGHe+RWKXM5CaQNte6qW7XxuF6y6csmIYfAgUx0sUhM1v0/a8yJGUuNV5qMrqeI4v1j7JQecB6uWsthBB1QigUhRuu12tflNJxBCVLBrQodLKYq5GOdBZjiI22Uc/l1jyjVHXlwTFfFmSO/ID3kVYc+92B5ZoEl9Ms0jJIex27Vvv9nnmedTZVYFHv0et5lj7+XLjWq2Lia1CwTbXWpkbYOkZYuVqTRYmPp8iCUs5Mu4l5nrEpo2lJ7A/7zWBBcZHf70buH07YJAOZNe+JcVp9NTXQZxX8j+PYpyvEEJnnRXFBu
WaWEdZUOV/fcnu7J2c4Hp8zF7kerVYulwvX+cLzZ087sfXy5ec0DWwCbXiCs9JVXMwsc3BONol5njtBZvI+sM169Sbo679lXr95w7OnTzWb1PVuGuzaaARqGAj4jq3bvQaCcZoFoRTjQHOEYOFz3fyrg+qcBtmVaGpfYPN/4dHk79+fZ65Lhpr5+KMPOe5Hohcrw9RuebcsuOAoDydaTnzy+3/Is+fPJKn5ymFbjm9EAAXJFjpe0+hsq8loLJBug6jhkiuWp2ywZQD1MbP2aJaLauH6DHi39p8XBeTrJpi31roBbT++kJ0azmS4pvMDwxhoRRzujd2uiwi4JZCEdcTEmhA/jpP2782DDhtLIniTTdi04GKEluBVupBtE9KsCcSSzbIs6wAyKKHRHjkJBc0GvQqqaUKcpJSIbmCcpIe+1srn717zvvZam48qVeeOuyYGIOqlCXC5XqRHfxg30z2jyJZq4XA4yPWNj7HYWuF4PJBSYllGJdQawyjD5bpFn5avVaVNTpsSxmHoG8F8nYnDgGuNVFbZzhCjnEMltrY4ss1gyqqbTSmRzmu3llyjgHcRb+oCZJ5TDIarOuIwkcqqHLD7IUYh7tI8U1rj7bt7ntwcuM5XxmEglyYNZKpVXe8H1ZwqzCFrRiViKa3O/mltjrBEoLTapxvEQbgEkakprhs8qa0Tazsev5HJrNaSmoToKl7UcWsYR4oRfPq+S1rY9up9pVzUKQzYRK3z05eveDN4vvsrHzPsRtJcefPmLe998C3CEebLpTdQiM736xXj35gAupa1wuBt/Qgty+ytg/AIkzSJhhEV9jtbGYnp87YlbYcAsB0R/bdCBLQu9/gZAHJzbE2fHXSyygcVbVctfWLk4+98zO//3u/Tl4jhnjYuo7Wffamf5xSomWpQyVdrtd80NqK3ITeQdXNp57zIjlxcu06C43q5Mk5STlt2aD9Pi3iJmrjeyvzdbsf1emV/2Pe5RWlJDOPI02fPyOqW75wjGJ7IY9ImI9crpYSLjnmZNRjA9SQtoTnlHqiijvPYQjnv3r0jRHGMN12orSlTUIzjSFoE5xIZ1KytoUKOJO3GIhdySpgFnAVwXwI7zVr1qlNr5eF04tnzZ5g5t2GN85IQIrniiJzPC0GhAtHOVi7nNwzDjlqdzhESjNTp57zOF3WSlxJ9nheOxxu+/dGH/PCP/nDtlvJBDVDWfvWqki70GtrMK2uR7Y0YIaxEnVsDl8MxTAM20NEPitMDpTpyU4Pmjfzn0bhtrQJWmEw7+BCSaCkyWTS4dQZV7pib3R6bjOKXHut97HD4cUeqlVYCv//pS5o2otSl8PD6DeN+5P333yPl1Mmo+kvu7192fCMCqBEbkmHWL2A3K9Nuu2QPpFuQ3MroZkKRx6U+/bHVWMF+vuKQKImzvp+O6/SAR18UW7jBfma7bakN1zLNRfywF+KgSQOASTia7gLrDKEvLpb+4v0TGHPfXMFVWfjTFLuDfK2rqUVtjZxS18Ya4RF8VNu+0rWQRTPHrvmEbjLsfejEAtC/11Y5HCUzROVSQxzYjRPX67XjyMPgWa5Sil8uZ7z3TNNOHNgR4+FpNxF8EEH3IsGsTEV8QLOxy7IODgcJBNfLRbI11ZDWUiQDUbwu59Upalnm3gq6LDPgyE1+Ltho02C9dOx4HAZl6QUCmK/XHriLltp3NzfKnOs4aOScy/tORC8+pCGq61G/7pUhyqY5TPvVk7Y6wgBVfRdckx79l69fc/vsGdcsWHKMg2wIrvZrYn35Fkga4g9g51N4Aq3i2qbrTz87jb6eSy3UuTIvC7e3t/gwiG2i9+RZG1jUWap6j6+F5sQfYmgZWuz3lOgKUALN0/ygIubcMf/WUDeh7dJ3Xy5+st7nDbpPqPNGGst5nHYT8+XM4bDj/W99wE9+/BM5x1+oVL/K8Y0IoLCSPWvrpWkWH5uBrD9z3XTEa5bibUiW4Yqs2aCJqy3IroCikSheZTxqD+e3V
6513Gr9fS1KNjtX2+ygPgRiiLqzu96C+oPv/6DfyNB6R9RKGtl7237/QklvZY+THTyq56l9dpGBBcEUc2EcJ+IkzHzw4RFJMmgWFsPa0inrdp35M4xD91G1hgcffHdudwjT6pwn1aR4alBtaGC/33M6nbpLfUqJeb7210opyfeWGFRbeb1cxOx5ERZ7v99Lme99HwQ3DqNcKw2CtRRODw8cD/ve2WYG0B3ywTHFyPU6s58mkkpxguLT0nETqNWMgmVonLn0B+9ZUuotra1V7W6T5gWZIhs6aRf3e2mgUVDc+yBSqZIJYcC5oHHDwO9KCOo4BdQimsaUMof9gdtpwnnHhx9+yI9//OmKp+pm3BRa6J+pmctU6C5VXhMPqzBKFYNiqyL6PaaZezchcTYWWbS2vR0PwVt9g+ZC13UKzCT3YcCRnYNxJDdZ95UVDint0Qr/WoehCI0GdRVAWRZ99+QJL19cefP6DS8++5f8xp/7DX7y4588Min6qsc3hoVvjW6Q2jOh2roJRs6ZeV60la90AkLGSMyIR6PKckyeIaBmN06uihGWkhHvSzFlzfr3ai36WqW/Rqmly6uMVRdSygJotURYW+ZgSZnbu6didabGxD545lnIneBFg2c4pg1EA6tclADTAsMqePldkS0FB55Ka5m7J7dSdGmWu50Nn4t0BNlj1k1i4H5r4m40juOqekB2bzNVNi0j0EmzcRh75tZaY7eTOUfCatuGWCXwXS6d5Hp4OGEyIzNsNmJJSmDU8d6rSYiUpq9fv2a+zgyjjLx2QEqLll+V69Umfept4xTfLFm8T7XH/bDfU/PaHpyzaIqHYVASaG3CCCEwmOWhdywpiXv9OOl6WxiGgfP5hKkKoqoXclG5lld5Xtt2gTe8g1wa3kfB9/X6iRNSViKscTyMQmhl2SA8hvX7DpHImq+9PLf7Qlj78shE3KwAW5P3CIZ/yzUdFBMGaa+OMXI6naQs751nbYOhQVVDZU/TufLCZUglY8HU03xgrqbtNGMRNURHITP93+ZUPf76ebHDNh8r5Z1movp5cTZB12acVZ4+e8of/P4f9DVtbcNf9fhGZKCNRkrL+m/NZuwaWW971F7e1ta2SyunTTbRB13pjilAvLnPrFKTR2U9zYjLvnAtoJkBRHeJ6kTUKhCuXaYkf+3u7glVFcI+CEv69t0bBbptVvuqFDApEf19aLsdjnEY2A0TYYhMu5H5euH+9Rv+1Pe+Az6I846aapRcKDkJI6wY4zROgmcpMeNVWdCnn7Z1RMa6cUmPsjmom3fmOI39ZrTRz4a7GbkUAv11vPecz1KyW9k/DGtPvPlObrHurK73IFhlf9+qCkhLYjdNgtlu5o/HKBnkNE2Mo5yL1mpXLzhgN46cTycOhyPny1VmSqm939qNtE4LLbXQFhP9p04+pbTQamUcZMTL8XgE5HNPQTYWvGdOCXB9BLNVCRIIK3HY0axV0jukW03UKPb5x+j5/u/+Ds57rteLZEvjKCbCta7enU7Wipxj+V5yRv2jWZbEtJuommg4YAjDRrdccLUyxMhhLyRVa6j5y8R8va4wQfNSLOuG7Q93lPNrQqmY76dzK5dQWyM5GUwXqvILzkaNaPde+nJ2cttE8WcQr83ROxB1jWb93CJx1BHb4zrC/Gfhsy93fCMCKNBZX8m+GzEqw6eYY6utz323knfbUWNdSJ7VlsvMH4BeguFWEwJpuVtv1qqlqZXYlgnZsWpHEcxNy3npCdbOod2R4XiHb5VxlOcOce3pt0XlnWOaRmKwTqaNf6mKlcHYZIDK5XxmWRZunz3j9/7oR9zd3RHV0VxmldPPBa0R4sDkV1ORLTEXrHW1abmuInUz3LXMsJTCNMrkS+s4UlTpUdC1c+lVWziNE+/e3XeJWNUgXXo2T8e6h2GUslyDqbHeBiN0U5EQmK9X7u7uOB4PXC7X/nc1aZJ5SE5kSK3KZzJiq1Qpxa+Xs3gDFBkgJyOgvQak1M8Bq
OC9aZeRdz1Y16K+C8vCfhp4uM7c3txwOp+kPXMUw5qmGRZI80bJF4H/qpheu9Z0DA3ghOkPCqfE6JjPF2IIHG9uefv6NW9efU5tcHN72wMtGB2gnUKIusOFKEJ9HwlRRk3vdjvp51fixu4ny9qSVnjSeCJQVEP6yXvX34p8SfITImE84q/vCK1SgaUVBqfXvhVK2NFQ28JaRJFRC6EIuVT1nvzZsPDz0k73M8HT1mRv9FSID6pcq+g4nS+EYQStWMZx7GvLqtevenwjAuijoOHUKu4LZ8iMC5riSNYFAsbgr1pKe94wmBUZXV4ErMB5WzlHW0gCsDfmee7ByXAkY3lLqSyq+QsqdG4Onr73Hm7csz/c8vazz9hNI60W7h/u++vYYV0h8rHlBjW3dIeMC27VPDDX3xvHEYD33n+/kwZ0VnlknCZhkXFKIK1Ccbc5r9fLtZfseCnnpjh1TLQvyPpYf1tb7aTX0KU2ChE0uY6++X5DS5eQYKLm6GMZnWTfYqw8DCOztn/6QbPNzdz4rUzNBP1dEaAKDK8Sp0E3T2gMfmDaiU1bVi2lV59OHwK+lY5pb41FTHgeQ6CWjKPpuWndBCXpOJTT6czpcmU3ThzUFQonVoxJDTZMhmfOU+M4kepj7wRpFoiKO0fm65m3b1/jnOP29o4QAufTQ5enieN9Y5wmDYCmsxDBeIyBVDeIunM9KWg9oZDf6aW9GXP0VK/15GNZZoZhpOP/+n8hBNwwkq+R4DKOyuhULRECQWER59Q7FE+h4CwwO7pb0/8bhyILfQ33ll0feP/99/Fepum2WmkhkFPqhthf9fjGYKBGFllQsutnpaDfZCHea/amHQf2e6a1tN5nyzJ74NhgKduupFUILzfp9XrtZbCN6gUURoj4EInjQBgixEAYB+I4cX8+c7lcON2/Y3/YU5rMTDpfRXPmg9i/hRi67rJ/RlUHGKZl831sqzV5jJVnDqdGHWJ07NXnUUrgFYKgaUnt1kFnRpbYTWJZpkmetgsvxNDx09Z06NowMmnbp12PUjcieW+u+esMnRBWk2bxPbDxEEJkWd++lfpLWnqWbJpg2zANnugBVm89+xvXeZYAFqOMClEMfVBNo3cyVVI8Ata11ZrMmZfZTwrZeGn1lPNR+/vIRTSeVTfq58+eSUCvGpgVi1xS0uTS4dCOHXS0sbHObTUOF9w1Qi08vTsido6S7ZZaOd7ccjwcuV6uXelgOGJW39yckvh2Kuwg5Bn9fqm1ijAfegYW9Vxtou2jppFaG9O0Zxwn1g4jwTipGTfs4PYZix9VNK9OU0YE48RXAC/u9SESldwSNcDX6wR6dGzqe1vn/Ue63l+9eiWTdXVdoRvQ2zdvvtZLfiMCqAWyQbWCcmOubZvDMHQnH+fWcsvkNqYFtdK+qjOQ/FC+Ga5pXz2Y2BpuovkzTCRnmZqZU7L6SG5IxTVdiIRpJI4jLURaCPhxANe4XM+8O73l/nzi/vQgBItf21Id6/uwoNOZQH0MB8M4dmWAfc4uOypVjR1cn8luGRnwyEuAJlpVI31gM9Pci6v71pqOxlrG43rWaO/XMlSTQi0pyWgOzVatrPcKbRwOR0zlYNfTTD1wTjWlE0aqWVYryorHGahlnsDq5WpZI/K+5+tVZESlsNvtNhmlWuXpuohhlWV1B3yVc1kgNdY6xNDLfnsvMciwOmtX7QoQJxlnf59OmiTO5wdaq1yvi3aJ+X4ubVRIa4WUZnJeiJq1yRekXKR9M4im+e7uTof7qQyuNVqRufJLytQmHUG1rU0k3bnM+0dEy1pxhU6aGrSUbIpsbbx+/VbvF/N+kLmxuWaqD8TDDVcCRfWpuRQWhaBaLdJurRuFcyuZ+kUh+8+QSV/8afs5eunNYYoDfbpUDrp+uhRNM/CcM8+eP//Ff+yXHF8qgDrnnjrn/r5z7l855/5v59y/75x77pz7p865f63fn+lznXPuv3bOfd8593845/7sl3kNG/BmEc9ml
q8ZputB1UiJbR9zDwC1KrsoF38Vy29Io02ZZjdD7xWHR1mPTHSUgDWnJN1IwePGgbjf4w976hBZvCN7uLYMoyNHx3jYMex3hHEgxAEf4tp/rHinfW4pxUYt99yaGWxG6279Kztuhhk9+83soCjAv8p3YDPKFxv3i468qMzLaqSwbU2FVTYGq1uSBeUuzm90OGA7Q17O7zrS2QD9RcdD24IWEkMkUkMUFrhtAoxN0zTHdLtuNCTwasDtZMjm+iZtCPDei1FyMdzPd9bashPLiGmt29WltKo0Sin9Od7L385ZsnSaONXbJmiZd9ycj2EILHPiqoSMc6KLtA1VOn+kQ+qw3zONI8MgWWMumeNRNqLTwwnvVradVtmNkWdP7/j429/ig+fPaM5xuLnh2fP3lIFeCcy+KdXVWFukgImGtNXaRlAUP7eqcBqnvl4lguk5qwlaoYaIPz6nDCM0R/ADbb+jUSk10Uqm5Uy5zkLOyuQbQy7/jQ+rJI03MZzW1kbSJEnuHDa62a93fFkM9G8D/6S19p8650bgAPxXwD9rrf0t59xvAb8F/E3gLwJ/Wr9+A/g7+v2XHs5SfW37s7Yzy/58WGcQOe/wrDsluoNbYPDeYwko+rdW8ol+s21bAtOStEQPxKjyHXVmF5F0pnhzuYHiGgTH7rDDD5GlZh4eLky7HbWYO3ag5gJuwYUoLGtaoLVOfhimK+9f++X1M1pwcs569AV3rE3LP70BV4G2kkPIze28BQnTGiqRUQX3E//L2tsFrRmh5KzdMmKC3A2bnRcWk6bElDD6w6Bspn2e4PVaamDQcne/P4igvZeGq0mM9PELnuyq2r4psdHMa7T7wAoWnNIic5pKobDOORqmidyaTMusRY2QZx0rLCX39XrVjqGZadrJ6GTWDcOyT1FwFGIcmKZRZj45sXpzXj5HyrW3iNZSGEKgtEZpTqRKDnybKWXVWhoXA7q5tUzwMF8Lu/2REAZ+7/v/mmkMuODJteGCZ5oGqrbt+la52U8MwyQ9/pczF+DFy9csLfPysxc8efpU5Fu7SWdiRc2eZQ6YjQHvSpUeSppCRCKRoonTfSHQyB0AlQGb9jtqm+cqbdiTEI+CGCaCdyxpoZaEa1WIJFcoYWDemPb88ceWhreHTLomSga3+bmNdOmKkyKwihmqWPKRy5+QobJz7gnwHwL/OUBrbQEW59xfAv68Pu3vAv8zEkD/EvD3mtwh/1yz14/aL50Nr+x3W7Ob5lBN2loa9ZvKwG/DB7tGzbJX3y9w/xzeKeMaVrMRC54pY/Owk46ZWBbB4EKQG3KYJlm0QySMI0+PB+7ef85wu4PoSBQerlfmOXH/5kRZCviITxmfI3WeGZwnO2gGaofH5imC+amBipFXalpr5aDd3MI40rHAZjrP2nowGseJEDaGFx0moLvkd+KEtUw2QD3l1EdiDOMo+GpnYhsuCGEkUIDv2spZjZQNn/RBgqKZfcS4dv88klOVupbZdQ3cOLnGJ5VEXa8zcSgqflc3/Z5NOXa7vWBbQeRV13nGRorkLLpYWwfjqGJ877rFngRFqUa2wvLT+SJmNEn8SW1SrG1uowrzQ4xislJVWldFGzkvCec9N8cbWdPdOEm6pvaHHbvdAecan/3kU66nd9Ry4IMPn+EV53Q4YmjgIYaBu7tbLqcLNRdicJS8cDndc7qIDKlcF86XGUdjN039Gtk9FaJgkKKTFtZa1kFjHAO5VlyUz1xo/T5p2rXevnCfSUCSJKj6gHeibGil0HTd0RrBBY7HW/bHGz799MfQNvjrlz0eEU+PkyQ0Goi6RAZ1hSCtvdfrlcN+L3GmNppfK9WvenyZDPTXgM+A/9Y592eA/wX4L4EPN0HxJ8j8eIDvAJ9sfv+H+tijAOqc+03gNwH24bD+wCQSaMuZnhDDoizLkYuwjpUwY1nZ8WSYGNAzOiNqsuJ2nUxpYq4g2ZbX7HXrOAReL35VgfUueqYnN6Sd4/DsSBoqMcIzd
8PlOvNAwl3BEQmp4K6zdPjrmIvqZJiW1xJXNgULloIbdhIDT3NrdmgEhrjmm/QJPU8rJmgGuWuJZoqDVXVgrL+x49aAYPACbe1yedQbX9K6+VfxebSsOpd11tT5fGaaJpakLZWqwbNpkdY0YZCM6UlNYdE9DBo6q2k9B2aoHUJgVtw6qcwKxHEpuNizdSPEvDovjeNIo2nPumaEtZGbmUN7qhP3fac3XjRyUwOEC6JbjcMAyMTO+/t79h9/vGayqi5JpeFj1PHEwQorwLHb74nxCDTCEIlUnn30AeGj9ygKLVSKOMXjuNnfdbJxfzhyeXjA0bjOM2Md+dXvfEwYAz/4/g84P7zmo4+/S2q14+RZN5txnIRw0w8lrvuShco1L+AGlgSlrsQuZNbGjYq1Pa8KGFkPPgRq9WJZ5yC3wjgNfPzxdzmfTwIfzcs69eDfiIe34Ol6EGkaN5xm00YqJ53QYEHT7qOvc3yZABqBPwv8jdbav3DO/W2kXF/femvNuZ/nePGLj9babwO/DfB0eq/RFKf0hsO5HkutDGwhwEa7lpThNOZRrMSCDpITaVNUaYsQKdrD2x3ShRE2N5QwaIEAACAASURBVCSvWa3hZADON3zTcjYGXIwMxwNl74nPD7yOM3XnGG4m/BCYHxIf3X2XF5++gqujnJJMD3SezEWlGx5KUR2iVyhT30OIClkYM2wyLRME2xA2m/UkLvJLEqmQuEvJ+7+5OW5GZuiJd6KRrBqE5THZqb2OnliD8tq+aPAJTciX7tdpBJeXxRuC3HzLvHTVgUuZOAi+K2Yc4kbUxxU3G6vhaVU2TWOlDdO00F9q7d0yTSVlWxKxKQF2ejgxPhGt5DANyogvIgPTcyE94gGQ8Rj2WqUUYbR9kPJVycaksiaB/tYxKoaheue4u7tbmwq8p+aFWuF0OnG4veO8nFmZTaefteJjZL+faDURXSUA3m0qs6puYFWnDdRKTpVXLz/vUErKhcv1Ac8DqRRub2949vw595cF58KaTascTGRjklS8fv1Gq5ZRLfFsFlYgxIFht0eY8obfVIpogEIDalN1gW3X3jtykmkBIQT2Nze8evuOp0+fcHl3z+eff47xFV/r2BDBcr9sW6NXslYw96IbtRJ9G+y5fXGI05c8vkwA/SHww9bav9B//30kgP7USnPn3EfAC/35p8Cvbn7/V/SxX3i4zQns5sOaURa9gduG2DAcp09T1MdzyfjqNr3hciOP06idKWuKbzfLtiuoNfGA7DiQQ3E3R3OOMI64caAOAb+PvCsXxt2RZaqUsVBi4TrMPD/ewMNI9k3mtefKkCMhD71Ht7UmWJkaXsggsLUjxyueZhkbwDhplwteyj/nWJKUy0OMMomyCYSxTtjUVj8t2XKSMlJYaVEYmNSlthV7zSVLZ4tz3akHLdVlebp+rYIPlJpxm/NaamU37bhcrhJIyjqqRXxf6Rlm1Uqj1vwoo7ZheIu2WF6vF3a7vZSErXVNbK1O9ZmaJetmWVVHm5ZFymofOrNvRFUc1GxDgzUOpjh1OMO+i3qiddIoqeDeMmkJwqG36eJWcbb3jidPnlBwChloBRUCzgdiHHF4zqcrd7cjbVkorVKcfLaO3c0LqRRyXihZAuZu2vPu9E4qJLv2MXJ39x4xwPk8g19VFL3aqLWL6p05w5fCcjrhw6rmwEHNV9xy5ng4yJ2qsE30fs0+Ndupm/XdpzRprH3y5AnTXp27osyRKnmF3r7qsW7ejw2CrFKxGGEJSFTFxOH2pitNTInydY8/NoC21n7inPvEOfdvt9Z+B/gLwP+lX38F+Fv6/R/qr/wj4L9wzv0PCHn09pfjn5JCWyZm3S+tWpkqz7BOYgsCTUszWxTee4zHb+rn6RUjy7lsjEIUA+wAlOz01NLt5GDNqIzVw4ELHj9FygDzBMO3jrzhzPB0zzwmhn1k9/SWXD0f/Zlf5+Xv/JQWLrjsaRV8kTndRfuvjZwwNtrwJdEQStuZ3
2IzrSkmuZblRsw4HwiuKcY3immz9TS3x5NEU5LZRFjw0iAdoo7oDatPadNs3nZ1Y2W9RMJNsJKMeJgmSs5M08TD6dSlQXGI1Cylr3mWboOTXVuba2/4cFKHeO9d1yB6Le8vF7G7k84geZ+WaRwOR5rz+CBZnuHFNPHuHOJApbGkDM53N3haY5lTv+lHxX4NWgve6XvyHZKQ//aaoe54/eoNeyVtvPe8ezhxe/uENF+x8SPOC85eSiZEz243cjjsudwv+HLFyT4guG2tnK8Lz9//FsvpjBsn8Jlx51lqYbh5QqmF68OJm+MRHweWVpjz6rxlwcbOnagJCj/68U/kg3mBUIb9Tlp9FWaorfVpp9fLBY9jtz9ogKx0UxErZpxujDixunNyk9VS+fyzlwy7kePxwBi9qkR8lyN+lUOup5fx2k7UKrK+PUsS9cm0n0hLUiMVGVq4202CfzrJjkV2t/FQ+IrHl2Xh/wbw3ykD/3vAX0VO2f/onPtrwB8Cf1mf+4+B/wT4PnDW5/7Sw0HH6pqWAlaCb7GwqjZeyzI/Fouzzmw3dtd2IpxZ5K2jfLtA2Jk1m+sZ1haDtb9TNbhJe7sj7CI5Qhsa0+2RZSqkkNk9OUBppGsluMaH3/s2Ly6f4qcCJydGzV5ExIJganapQdoClQxC22oIaw88bJ4Ha8nivSOlsrqkZ+uAkU/SM1CT7oAaP6xfVq43DdRmtCH+odYXL5hf9xygETe92yb9sW6ZZtew1Ec3sRGBteT1WuEQJKipo5MFVt+vd2vqdakNEGbDFhRWsCojWobTTHKkFm5BAp+QRFJr2GjhQfW0hnHWUjteVktlGofO4JZSukeAZbphkL/rHMQGYRi5pMTxeIPJtsZpYNHWUucaMTqGIZKWmfsiblSjhxkZ3XHNQnDGYeJw+4Q3D+oR2jJNM/OgAWIYR1WP2KTZuGpN9bx4Z4PhHC9evKRoteODlymwXjbW0cY0I5ABVSopaCwlM4YgEIyx19uMzqAUWXjdiNm7xm43cXd3xxAj5/N55SO+xjGMw6OmhVIK47hnQIy1S6nSmZdF1mgVi50vCbjr+J+vc3ypANpa+9+Af/fn/Ogv/JznNuCvf+V34ozcQMs7xStNamCSmFp6BmYXy+aED1Fu5Elb22QG0Np1JIShAcYquNc5SmmRctapsUdTdtH6ytGF1ygc9iNuN5AjzC0RDiOH40RyC9NOXV+Adm1853sfcpp/RDnNnO8Lp9OF+fQAObMbd0xj7JmR4djOr9Iri+irQcc6vsRZdt3EbNdGdEzT1M+HSYTk8Jp5r/6pMUiWPsb1RuuwSC+z5bMvKYlkKaXVpSmsG5L1wptRiARkIapWob72koNmlp6tyYtsdOZwJZvpmkEZqVVpOgfw0ebbWs9gV6imUis9WwymF+2lp5RwMk75sejcB+lcorX1c+smYbDQdhO3fz853nD66Uv8Yc+w27HU0kEq2+yWRVtpnWOZr/I9JVLLXJEAPufCk2fPydcr4+FIwVFxROf6xtG5Aue4e/KEzz77jGdPn/bZ7YYl2zmyDeCHP/oRtXniOIIPuCHihijjQmKQDju9bi4XKBWfqmD3rZKr2cTRS2BrVrHON0cj69prtVJaZZ5nHh4eeO+997rRzJdiwJt9WyvR4/HI/cMDu/2eh4cHUeO01Xry7vauQyzn85lScl9vtST9Oz+rJPgqxzeiF94O20msLJKgMTDo3G2QjMvAcOtmyDlpu92mnx3FGUexKbOjAWp808uZ0Z7jLOPT7NZE634t951zjNMIg7SsOZXKDD5SaLx7945jeEpxmcpEc47704UXn3xKvSzEOSsO6jidT5xOlWfPnombkNwJErg2FndGbvXFqVlFb8srtT9mmfo8z0yjymXc2kYp881WYiI1mduTW+ntntVuHMVDt9Im0KxdM3U71zIbiZ6BCqnidS6OCNB348gyz6spiVYL0zQqgx46nghr1gxreW+yIPMlra4yjCNpS
Zp5+g5RjOOoMAYIlFFXq7cma2mIUefOhx6IbENwSLYy7Q9czmfJaE1DqZaHpids5hLW4Hq6ZxdHrqmQ3UKYRkpK2mHmekALulkEzcinaUderh0Ddshnk7EiAw8P93jX5G+1Rhxi19/iZMTzkydPROBP611Y2wBlqgWZHAqECCHgphEXI20YaEOgDqI/pVRIhbok8AVyoaYk7vGqfcVgMcXOe9XkjZTNBO+5ublj0tEs7ssGzi8cdn8C3L+7J5fCqZ05qM+s957b21s9N6m3YksWKmvy7du30vzQI8LPmpN82eMbEUBbE7FuLYqJAaXlnj21JrtGJwlAyZ/a8dLeQ09bZ9Z4p1IMAEfYCGcL5dGQuN5K2dSSQbOjOMS1X6sV9tNAcOCao1SP95F4mGjA/dt3HKdbwiILKmV49ZOXvPjxKzybDhBlsnfTjjSfeXc6cTgc2A1yw7thwDshPWpaSGkWwXKR2S1LTb39sij2Z0YblqV3mzBdIJbFWWCQzMEzxXG1pnNFZyLRiQbLPi1LtOzXSnvppJJy6XK5qAGv3LynB51f3hz73U4w3yFKV9eyqONQ7PKjJSVsZrj5GUjl4P4f7t7lR7YlS/P6LTPbD3eP53ndV2YWRVd1VdETxIQWYoCEGDQDeoZgQgu1BAMmzOBP6DETpJ7BBAkGCAZMEBJDBg0UBQ1dVVmPzLyvc88jToSHu++HmS0Gy2xvj5tZWXkvQrrdWzr3nhMnjof73mbL1vrWt74PxJPyXLQAYhH0sPc4FyoSrJ7sKWWz+vDOdEGnCXX2/pu2Wcq6ykawjG6djDofw0wp4rwwjdEsOArcUbNyq2rERHCcoLMZlYylseRwTDkjmP6Bc+a15CSQnWloeufYbHYc5xGnSvbGo8wJmrYjNC37/cEYF5PRw+ZymJZmONX4rkJPlXpmDAe/JAjjOBGTQPAQGqRtoO/RvoFNQ2o8uWsM/shY8Bwb3BhNsm6YIEbSPFMa2hXqNDW0ZV+vwTtl5eF+jz8cefnRR/zf//j/Wb7nO8UKHELicrvh8XRCxbNtOxMW35gp4DhHun5DjCdur6959/7dMtl3fXNdeNSl3/Ite5jvev0gAiisFhpC0Rh0q7UxsGQmVbexZidL91hWMnoVk7VmUW1MlHLjLMOsuB+sNhXTNHO125QpngzOMaeETjPdxYZOAiEKLglN2KDeM49WWFzKJc3UMH8YEBd4eP3Al3/y52xihpTxOPOg2QbmYUKd0Gy3pJQY5okXz2+RnDieTghGzUIV8S2CkuOIE1bxjYJxKeClyqZZsOu6filpn2LJtTmWl6zUe0fVWT0vm+0QCnisbK7K73U8cR5n2nblidax27ZtOR6PKxwAC/Y6F6EL7/uzbLAS+N0iApNzme923qxwxbDCXKaoDK+MpXufyxoyO2fJZYZ7nun7C4CiX2DZaRXpBui6Hi8Q50KbWvDmyGazWfAyYwtUq2vDb5sQlrWXckbKZJc6R8rQ9D2USSahspaEagtd1eOdr6pcGS0QS99tiPPRGl5NWA7IaRoXNXzKXqnSg7au4eHxkaurS2yowjKvlMq+Ae4+PODb3jLPJqBNQLYdufWkbSC3Qm4MU5Ks+NSijxONF4hK4wP5NJizwDSxaTtKEwFYg+J5SKo9h9oETjHyvZhDJQHou5am8dzdP7C//4Dzns1ux8XlJQCn44HH/QOH/f7MhUFL5VOddlfcfmEdfMfrBxJAC6DrVovUhUQNT0pyWQIFmLK8bewaAKvUmtn3yoLPVC1QsM23+OCUDd+2Veor0QbLZg/TQNddQDIDrRYHUyI+nIys30AePQwe1wktjvF4YBMDEie+/id/QXtMMExIymhMiPPEnNDgSd4jwbibQeDNh/f8jd/6EZtdR07K/f2eabbAZmRqaAqvNTTrDH3FfoBSxvpS6uYnC6SW95VOVK9YPIfO5dwqmX0cVg7fXDv24shqEmCWZVRqjM2M931H0zQc9LCQ1J1z9H3/Z
ERynqdlaCGWEcjgLaONKdGVpshcYJ1pqhlptuBJaYpQhWLc6rzqi15nkQbSlAgiyz3wPiwjqL40UYw7e/EkW69ZXEzmDFodEIwhUGhiIqt4S1YkBCR0RGyc1JgDDnD0mw13d3e0TYd4OxzUCS4Ix/t7xDtUPK7r8ZPN2vf9xvBvgbY4fwIL/zSV/VApStdXVwtOHWqpKlapvHvzDhcCeIEmoF0Dfcu8seAZLx25E+bG7ptkCMnw4DB68nEm5gnXBtxsfFxXmqLpDCp7ms8JXdvY+nHC6XRCliD7G3JA6wsqdG2D5ETrhJe3N+wPJ07DyHB4ZBxOeKxSq9N6ZGve+uJOKwZ+F41h/WcjA7VO6VxOi6rGVDvNpewtwaNmUcAiMQaGw83TtJTfxiNd+Z7f5vUZ3pgLL9HwsHmaOQ4TL57dFMJtoOkCE4VQHhPzaTZ6kgedPcQGvw00Htzo2ajDZY8/zPgxIXNayMXiHaE3UQtEaHY9CnRdy8XmFa/v7/j4xQt0nrl1wjxGPtzvEQXfV0qNW8SizzEhLc2OGiCsCVZtoaXcN7dklwu/UdZDpm7CpmnOSOpFOq7csxiN9C5OGKe4SOypwmbTMwxFnERXBgQKp5N5Gg3FnC2EsNCcBIoCVl4wslxsP8weo1uCX8UqF3y2dPzrM845I2qTTzGVuXGB0zCszTkxUn9tXPmqtl7WY9O0i+hExTxS6dxX/DafbTwPRsafIjk0pMTCUyYLuFW71qbeLPC3WLbahYbpNJj/VGkUuqaILi/4d6rMMeuw5zqOadqbq+OsMk0TbdMuDdbQBJxrKB7T9n6cdd5z44mdI/bC3Cuxh9zaISRZ0Cg0IpxSpGuE0DakyQ5QaQLjNNKG1UROSjK6lvDKNM9cXl8xJ3vGlOD1XS8RpSvd9Nr72W36MutvePDN1SX4wOdfvy0IlsFy1bbGlPDdSpn8ng0k+KEEULGAWDeVZU61rFzLzlqqwVom2GRJCYpngcEFWTg8lc/IWaBBDFOsZmI2aWTd2Ckr9/sDt1cXqApJzMDMmirgUkYOs0nazYqPDj86cEZubsncvXtPe4jIlOxk1oyKkLwj5kxzuUXawOiU0DWkvkd2O7pNyz4mLvuOvg20zUjbmFTcw+OB2QfG02iZV1rl2SxjdKV8PpWGTCweO6tHUg2eNQOs2WEVPa6Z3CqivHa/fQmsw2CNDlPDyqXE1KWhVU/6yu8zr6lpaShpNpfGscyo1/dmB12g63zRZLWqAGFx86xZNWr6l6EcFDHmpZlTsfO265gmcwNNMdJ2piFw3qysMnKWhZtcojWbTJC5QnR17FY1G5aekg15lIEHMmwvd0w6cki52KYEg258MJmNnJhKs2tZ9smSh/3dB3auoZ0io4NGhSCeiDWHNEfjRnu/cGQrJW9OiWfPbrFhg2HB+a3bb2Ordg9PBjGLWCfVO7QJ5M4TOwue006IXUY3QHDGXZ6ErNDNnjkn5DjTeldm5CebfkuJII6sVb1rhcoM7xWGYaTbdOwPD7955vntS43riQiqNgCAZpxoEVbxBCd88eWXiGsNNinJ1sXF5QJrhWCasItNyT/VXfjy3puiR2gBIS7BsF6VvFw7aucq8xScDijKRpZNhDO/dbtZVTSBgs9JQW88vunYXO3sgZN493CgC+CDo2suzcY1KYwzisMP0cbaciYdZvAgWXnz4cjrr98iUWgQNBWupXOoF2gDadOQG/u96wPsGnTb4IIiMROnjEaHayDogKbMza5HVXgU5fF0JKuQ40wbAl0X8CjbRnj28pZ3H+5RgThN6MKjdKgK1fm0cidXUd6V6gJlskvc0mWVaUIoSutpKie4cQPzmXJ927ZLI6qKbbgyHRZCgy+ZWw2ytUFVN+E4TbZBgK4Q0uu13VpH3NTeIwnLyLMqvtKqsqkC5RTNWnjTY0E2Fi7g2mhEtWCp4LDA65wnxrkcMuv/LXuRAmVkQm1K+kCMA+k4MBwHcmjBCU6LVJsXJ
FtGOg5DmVZykCFZj43pdOTCFbEZh40NR4eOGRoTaHGipFRU/FPi+vYFTbthRpmBIMqzy0tCeR4//ZOfIiK8efO+lNrF8cCoGOAc6iA7IQdIjZCCkjtFe0Ai6gH1iDcFMu/UsF5JZkcijvF0IDhv0ICse2u9BMiM48DFxc4gEJdYVJS+w+WlyOeJJ2usPXT7KYXmd4owqz1Lca7AeNmsdbI1j6uBYHUPWAZrvuP1wwigpfyi8B3Py9Lzq/rVrNQDXcqr5YUKppExQ7GFW4htUO/82jySOvaV8U1H6LY0PkBWfNuSU2R6fEfjMjrbpvaaca5FpglyIh4ds69Z656xjC7qbGR6FxqCOGZR1Hu0C7BtyBeeqVHCpae73rJ9dm1iFp0SDyNjirRRaXNGoxKKSAo5c33RcbEJzDEzzckyTDWOnBOBNPHi+gJBmFPm/f7Aq89+wudffoVPZYrrHMrAstDmDFedo41y1qyv7/tF/CMVx0kTS+7NMfLMrbJCJQs5v0Asmgu1pbAjKkXMvONDeVZCjFpglZVQXx/xUHQka8meUiI01gGvxGhbK8ZDHceJqbNssgonw6qPmlKka03wBLHPXvmCFR5IKdF1XTkgKl5vnykno6XpcSI+TrhnlzCtTbt6b+sc9jSPS6ZvRnI2UiFNh6PQpC4vmVVL5zyTciwOrMI0J1y3ZXNxzePgrQLyliGPamO+297ROuH3fv9v8s0373nYD8amSJUwXmUjC5/UC+rEdCg8ZI9RlpztM2MECBiiUNgM1ujzmPcW9dDSaquz9iz6vjeqmMLbN28W5sP3yfrqfY3U18fwzoJDa4D3d3dGA3PVjsfx6tWrZW3XameZgMrgm+9n6fGDCKAWA3WZcT5XRlnj6JmCt0JK8zJtssxPF6zUyO+l4768ztqcWkpGrbPBDa7pEN/ifKDpzILVdR2X28DD26/pvMfnhM6ZVFjcyTuSeGZp8G2HDjN5sE65qCWrkjKhaVHviMGRWo/2AS4c9EJ43tHe9ExNxIUGF1qyEz7cHWhnpTuNtDETNNOh+MJlCy6TxAqhOoUWfGNSbWIBxCNojnx0c8GHN1/hk81SL7idry6nKynfuSqbtjqfemde7MahFfKCn1YcMdO2fbFtXoPjynqwMr66RsaqZ1l4vDnHgtu68h58yWCbIkFXp3/Ms72OTRrmGxbVLhFhmszzp2LAFxfmmNkUp85zClIN4HUWvpLwa5lXg8B5U61OvIkYZt+EBlFh1pLBCiSLNavqflmEWSlK9KAIY860BY5IwTOiBNdB3wNizc4pGnyQBec72n6H+h1JAz6IQZnUA6VnTpmEZb4O5dWrZ/zpT//E1PPLrH6WBRrEwEqsqaMlU86gxT2TbBWUmPC8HdQxIZqRVPyn8kgfmpUCWE44LWvMh8C2bTkc9gblPNnX3+3yBe+mfGar4HOpThvAKgQt8cA5x9XV5dK8XDjkISBpnZv/p7oLv4LNlUKzdo8rJrVQI2RtLFkjaPWpFqFY9BagWMv45Nm/q1lpzWJAmLOwbbfgAt47ms7TeIeTTAhbLq5+h7uvPydINiqHCOocc1aOw5Fxymwvb8iHAR8jpr5u2OFcLG372yti63ncAJcN3DRcvLqgf9ESXabrK20KaHbcXjxjfHtk+MUb5rd3pNOADAMb79m0DcFlYlRiWouYOEfrDLctH+73HIdI44WPX73gqu+ZTwPxDG+sh8qq9pRZLWsLR/LMI53S8e9a471aQ89O+GEYoBLwZW3unGdy42TZVz0MK2MAGtq2W3ik0zTRNO2ChadUDzp7L9vNlnmemfKIYpBCdSGoP7N2z723rHOe5qVBllOhFZVAuvJz16y8fi/k1bwur+t0niO+MXm68TTyydUtjXOM44nW23snK1kTIp6+60lFVX6eZ6NVpYwUmtn2+opt23K/t3J4o66MobrSzAu02+dEazuBM+dMksMQvELVEzjsM6NTdr2n9fCv/it/mz/6w/+dmO29J
1VjJ6giSfFRcRH8DG4SgockHnXgouInwU3Yr2j/JqgR+behhQha2C81cCJrlRBjZHt5STMNjMcjC8lJzyG43yxONG1rmXvGxnllVeCKpbGq5X3gHNvdlq5tC1dWl/f4JPaUQ+T7XD+IAFoBZ+qHwTytbZOnpetaF3A64xyileNpN6Vtw4LpeR/YhFA4jG6h83jfEOOEsRkCfnMJuILBKl0X2DRCJ25pon7yyWdlgimx3z8wDgebCIkRP0Xm+3vm/UMhOCdynnGNYUc5ReN2tlu6yx3x0uFvWrRPzGGkv+pIIZMnQadMHxza7Wik52V/yed3f0jT9TixbGl//4DLkd4XPNFZOeTF8frNHc+uLrm5vODiwvPV/p7P3z3w8bNbomS8a1eO55NAaqWzlEZAnRQxo7KwcnMLP3adU5eFOmYHoFsy/bVaSAUekPV5Yz7h4zjRtg2n02npGNcD8HwUMcZUGj2O/eN+gWVQZZqnQkmalyALdhBbiQqo2afEkjXW4QmbY18PVFFrRLWtfc+cEqFtSoAoQwhky8hcwOWZi9PE3fCeT65uuM3CVzoYC8AFo2WJ0G9aDo8Tog3eWbbmCtNEs3B4uEebFt9tSdNE3preQE6Zrm9R15KSgCuTWurQ2oiRhOaAoLz++s+ZH48gMylBHxzeWXbfeH/OckdSJs8JmRLNyYRXri83hNDxGEfuj0fDlWelOUAzgDslQoLrrqHzgf3DQI2YqmVdkcnZqkLvTdTn8cMdj4+PZ2tA1//9miBq6KWlCKJGRRMElbXBJyK4wjjJwoKrf/LRS3uFmvKKGP+0aIOqGhZex42/z/WDMJUrifiZXl+ZDKmpOpRSvciheXeGhbIu/jPstOv6wt9bMVVxUrC2YtvrPVkc4lvarqVtAn0XQJPZrZaM2BqXxgQQH7i8vuXlxz/ChQ7vGxwwjwN5nnFqMl+utKesyBayOKIqSRMqyqSR6BLaKrOOzDqRfKLddWjbkANEUbITXn78Ma5tbfuK8Rt9aBkKkdnI5Zn96cTbD/d8/s1bfvHNOx6PJ1JWms2Gr96+JXIm4FzmzQ03XPG6eq8W8ZWz0qeW17Wk3ZRMsL5G7VJXqs6qY+AWxkC9bCJmAFiELyxrDNSRy0ruN9m4UEr8uLAIKt5ZM86qrlMzEi0dWyvRHfO8ZrZznE3urzSgzmtKm93PhV5lvEVKVqpq7I3khRYhPRzJzjDEGBOtDzYvXnHgkm3Xe2CfveCPczJ+cIp89PHH5DYs02R11FbVPIqqa+vyPrWKwGREPI7M53/+x0zHA1pZLWSCdwVGsKaWQ3GAUxNAkZQIc2KbHNfawcPE8PUD/cmxHTzhEcIjNCfBnzJhzPhpwkXrMZgc3fJU7cDUml2WdeD/GnzxN41dUl10ZTGfrBS2hepVaHOffvop4zgW+UDOdFDDcsBXZ4e1kfrdrx9GABWz21g7YaW0lNVytm72c3I4mMNjHR+sG6tOvACLGnlN020G1hYl4tlc3hL6rSnZNEIblCB6dmNKplXOqKymbg2GpAAAIABJREFULh5VePnJj/jkR79lC6ToYcZpLhzlBk15yZAFhWyd4KyJ4KtupBCallRoN+I9Glru7k+0jdFG+utrxmRiDJUulHG4pjWhCufYbjbcPTxCaJkkEEPPh+OASOBwGsihIepqN1w/V0ppmYypf65qPk1jGa4/s1uZJiPWN01rAg4iS5kGrLQQrWZ95oxZ8eq6WGtJXaupOpI6J+MXhsbw3NrpH8dxCca1eVSJ7vX3q+brOkkVo+lo2rrwS8d9u91ZcJ0nnJMnhnV91z9pAAVfG1x2qOesSHAM7z+wlUB/seOf/52/wX48cb27wE/WqAPLIOdokzfm8mqeWE0w5aPDcCQeT3z48jXH+/1SDdTnojaGZQ0leAoeqq0pyYkvf/6n+HzC4fit3/l9rm8/ohFdvMHqffbiCFjpGTQTYqSLif4U6R8mmncz7s2J9m3k5jGwe3BsHqC9j
3SPM92YeNY2NMDh8WiNm3L/LQ0CMGM8J9bXuL/7ULLPb+97qFKRvzY8lG+ookA1UK8NUFuDoetsysp72rZhs9lwd/feMtMz9bZl6CYVa+cyRPJ9rh9EANUSWIx68hSz1OWk5QyDWrvrwIJ31Y1Ty8sK/LtKGxE7MS3Lgawe8Z01LYzVQdsaxngOz5w/3xp6hiFxOBpF6ObZc6qGZUqmfu+dI3grLSRn+6UZiZmWgM8OnwQ/e+Yh0fkteVBaGrw0tDQMDxNkgxB+/Fu/RePbs2zP8MjQ9RyGkcfTWkpN88z9wwNjTAyngYf9A3cfPiCZMsFTm2luCZw1ENbGii+HUCzz9nXks95/E254qt4fozke1szISnrjJla6k1lUB3PgPOv61yriHGqpeOQ6ibaui1Rm9qugjBZMrvKIz91O5zhzOByo2gkiYpYSuXT7szIMwwJZnA8XKOcSbVbCt31PlwWvMMaJ4AI//Sd/zH4aGOeZ59sLanMGLdqYIkvmk87uU/CeNAyEKeGyBYM6418EDzkvd4WnEoSC8PD2NaQRxXH17BPGLPh2y6YzbFhldf4UAe/Aa6ZRpVElpIScRjZZuHU9t7mnP2S6x0z3kOj2ie2k+NPElQ+4mBmHiRC6tZOtZX1rwumMaIQc0WhOnL8+APz6v37yrWdN5TXRsqo0psRc4kedMLu5uQGEtm3KKWJrTUsztjaQvm8A/YFgoGt3XCllgVSdw7zQS2rzAdZSc1Vxt75jzULrZqsLsWYjThzeCSkpTbtBCQUisMZPzNbp9u48B10bNaA0XhDveDwmYobt7pK2v4fDaDzC8t5uLi/pdjvujgeiCOBouh3Od6Qo5AHcATrX4yIMD0e2bYto5Cp4jocTQTzjMNGhdH1PPB2J84RDjZBf/HtiKtJ09Z5pZphGg/8w75720jOpZYUhhGVCJae8KLWDddpP40jXdQtGuKpjudLkaZ4EN6sG2uU5LU0+asNPl1LqXIqv77sl26o+4l1nths2RQRdY7zSOhnUFHoVGK45DKfF5970YidCcEsp751fDAWnaWQcjVw/jiN1MkmyDQr0Xcs0TyyNy2z3T7J9Ru8dwzjRHUcjt3vHYf/Iru15Nx15HE5cbjZ8NZ7Ar2V2ZQdYlgyqjnycEZ9pFNq24RhWHzBLJhKbzcZI+EvLWQs5nNI4FU4P7xGNvPz0t3HNFQTP5uKafB9I4hAXKAKLhZnkiii2JSxSKpB0/8COa3zOzMOEzJE02RhyC2zEM9zd8TiMaDaIy4snqzENJMcCEVRxHtsvbROIY5nqOg+WUr+ky+76za4zuEXXf7t/PCAF2huGYdFhrQpUTSg4fCn5q4ZALnDN97l+EBkosHZsy2lQJz/Ob3jdlPV7KxfRvr56xtsCXUsws+9NCyaZdUZ8iw+7MzDaEedE46zDLnWxAr8qBxVVutYzx8wsDdevPuXy+toy6OBRcdx/uOf48MDGCxvn2PkGThNhUJrRsc0dV9rRjw1hL2ynhvkh8Hgv7KZMlz13b45snPEV5zybADEVezfgfI6JhHCKM4fTgXGamJMZmp2mEe8Dv/cHf0D0a/lepb9yySBrNlgDW9d2Zfx0tbw1RfgqXqyFVtQsgwkpF4/00sCrp/2at9umqv/GOu4z0zSX6ZCAQxhPo41xOo8vWGkt35fmISziIqb/as+94pxQslRMyR5XRYabhYRfTdSMYF4/exWDXpWNXFkPCsQ5E1IkiYlqNDiOceLm8oogwt2wZ9u2bJwv2WLGATEngm+IaYY0kx4euAmJv/XiFW3bcN/C1XZnazdlGh/KphfmIZGzqWUpNj+fsS78cf8e1QFchzRbNCjEzIxAuKANgWk8WZZWNDpFMw4TuHEpwRTRYSIPE+PDnvx4xD0eaYaRdor0KdOlDPPERdfaOPM4cnN1ifcOrwmXZ5zYyKwxLcwQUcTRtU3RAlCjwdRfyrIn6z6vWfK66ct/6sFcM0gF1KiIIo4pwRgtX89Zu
Lu7x8aYLYCejsfl9VNMi15qnUz7vrPwP4wAWjLMOrd+Tln6ZboNy4avGFjTVNGEourjqtdR6eSWkS2zKfY43yCuKQd6LtYEgHgOY2I+Ex6xH/jLb9mjdMEvPu+I5/rZS2xAXkEzpynycDgxjjM6Trhh4CI52lOmecjoNzPxayV9ORHeJHgD+68GmnvQx8iFU9Jx5PH+SFBlOk346vUgHpFgPjjBA5lXr16x6Tc8f/bcFohz3FzfQM588eUXDPO8WPoCxR++NIhKmWWq3u3y9fP5+HP4AIxGMp8FHO/8gkOej/OteGhemkc1WNlztlL8eDzhXFVdyovyVh2yqBllpRTVBpYR/NdmRv37Ckn8EvZ1NqxRmzu1QsmamcbV8UALzogIrnghyRjxWUvzzsr6t/cfuHQdwzzzeDzyaruDOJPERIXTNJPHgWfZ8eNmw2/f3PKs33D68IGby0s+/vRjhmhUsNCYGvw4DmjOnE7HJeOklvUqOEm8f/eaiOfi8tqUq0owcgq7j38M7oKrbsMcI8fT0RqwoYjAZBO4IUbcHGGcmPd70uFEPg5wGunmSDPNcDrZJFpKvPjoFc+e35qJ3nYDebTgWfQ/K7H9PBJe9G1xFbUNr/yqgLXCdb8UIupz1Aqr2ECHNYMD79/f2RBCqTRqlVKr0IuLS07DaVm7Kadlj1d3g+9z/SACqEAByt1Sbte0fpkaWHiFftlQoIW2kouYQuXsFdkxCsXBr5L/c4zEbBJxqm79+RQMrioMnf/FX/GmHcrlrif4Io2nSmhavPOL+2TfdlxdXqFpRueJeDih+xPNSekHCO8Hwv1I85hx94lraeiGRHwY8FPEpcTpMHA8JW5fvGBKM0oCLeIRrEry1brXB2v8XPsO3R/ZZEXiTFNEiM9xvvOSGlYd0K54iLdta5qorFXCeZfYDOXOJOncqnhjnfL1dK9TX13XL5lw7ZqvVKeKX7lCpE/LBFQIK9k9BF88dWoX1QLpKi6dFnZCvRanz0W0WwpX1K8ZSNm85nxQOK3FNRNV5tNAKNVJLo1FAY7zxOXuAk2Zd/sH+k3PPI3kYUDv9/SHkY/U89nFFdfbDa/v3zEOg83Ux8iH9+/Z7XalWZaW2DNNE+M0FqyxpF4KZCGnCScJlcDlze1yH2tRG8Vz8/IT8A1d27Hb7syttG1pm8a615geqQBe1ZLDlIt6WETijE4TsjRhhHmayihq4uHuHSJV3b9WGr/MFnDAtu/xBgSXPf/0l+nIdk+zwfKNqnrmY1bTVcV7YRhHUinl53k2oe2C258/576sO6NaySJc810w2G9fPxAMtN50rLFBoS2VwLf8PWDAsSs0l9X2oxa15xzH2pioOFdVBZqTYoYFAprRJGgysQ8BkxTjr2sOWknigC44G9HMtjCbtuPypmWaEqHfMtNy8+pHqFNOxz0pJzrv2blLLjYXzAKHE2xiYJsD8zAyHwbEC70kHk+RSYULB5+8eIZLA8dh5P7hQNO0JCq+K4uXUUyR+3nPdQhsFKYxMg338OJ2oSiRKQyGlQuXU/VOiuV5yOK+mXNeuvOGT+qSFaSiaL9YSCyUprDI5dWufC35a4OvdrxrYyiEwDiMCxxQyfc1I52m+JRtUcq8Sva3QJufZKWn04lNX6xOippP27ZLEF7m+LuOXBoQpuZkalbzPPHs5ha9P5T1aYtLs8nwnXLkw+nAs+0VH6Yjj+/veNFvudju6LI1brIqw3ji/v2Bi4trpjiSponDnPnwOOPVIbsdKTXMoxmgHR4fydougQi0eLg53nzzpR3MzQWpjITaPS08W4WxDXTPXzHffY0T4dntLR5hmKbimnqyZxwTTWdUOet9ZbwYLQgE3zRkEcR5cjHZ2z88mLg41c+qNGk4C57l/me10ny36UFgOA2M6Wnkury84OLiksPhsDT9zndh0xqGaQmOwQWhCbx/d79ktJoz6tbGW804s1q5fvfhjpubmyfVlJ6/5+94/bUZqIj8noj84
dmvBxH5j0XkmYj8jyLyp+X/t+X7RUT+MxH5qYj8kYj8S7/RG3Hr6ZI1F9HYOomky2arf65WCIspHDb76r0v2awsZOtU+F+q2PeErtzwjBZxEU2Qk6LlFK23U1TJvyKSlvwWMGnFrrGN31/siGO0ReVaplnJNMwpMMeO/voTtrc/orl4SfQX3I0t+0OPxg2N8+hpJB1HQkroPBEQnCqnYSSrtzl9l7natHz66pbLbUCnkc47Gt8YZcgH4hxJOXPKiXkb+NHv/XM8f3nD9LDHYWWW8jRr1GyeQc47o5UV0RGFxaUzNI2p1iNPTpiKldYy+RwnPafleB+eZrGqS+ln5Xg0xaLCO53neXH9NJWoXLicWp53KBbYxYYYE8VeJ9tWDmaMq3p/VuV4tOBRLZCd9zayqrVTWyCDksV+/eab0hwpEn1Zy2hjJuA4TiMpCD5mvjnu+XB45Mu33/D53Vs+f/+eu4e9mcUFOOw/MA8nurbBN4GokKV6XjnGYSyEfxDy0g9QrZJ/kTyegMzN7cvyfQoLp9FCggP87oJme4O6ltZ5Wu+4vb7i8LinbWzQRKk+VAFPoHEN1bmVMjyhOZNmo+nNpxOeUqafZYyGXq2wjZZforooWMWc6Xdbrm+ujO/tjDW9vdjgRLna7XA4g8PISAmWjTe2jP0gY7p4IOZa9uvZL2tGx7mMDReH3hcvXvLw8EDVJwAWrP/7XL+JrfEfA/8igBgZ7Qvgv8W84f8nVf0HIvKflj//J8DfAX63/PqXgf+8/P+vuYSutxNxxUKWMPakeWTd1irg+5SgXbUkaxle0/iFinL22nXT5pysrHUOa8YZAdvV9cHawKrX+cnjgCCevhc2257D/aP50W8uaTcbK0e9DQd4URMZSREfjdCc1LKqtjWFmXyKqFqTw2WjwcQ5EluPU4fT6p3uaBvHqxfP7JBpWg7HE4fjCe/g+bNnXG82XF5tSTHy/PkzXNvw+t0d3e5iOZBWnLlMavgV+7Tyxz7tIs6LlcFVGb+yJ4bBjNKqqd23Cfpd1y0KTyvOvboRVDm+mCIppuX7zcqEpRQTJzgtG0BX3MwXiKJmaudjmRXvVl1hg/q5ULPFnefJJPpckfDDGi++tSATVBlOR7rQFjFjscmXoHh1qCiPhwPb3Xbh0DahIefE/Yd7HnNkGE7ENFtJ3XpiVhKCb1pEV2k/GzxovlV7PWnHLaInfW9qU5Xr7GTtG1CaLN0nr7jywvD+DfFwoG9aXn38MQ/7fQkCZvXhO5hztln6ZP++QjR1/QuZPMdiRa7LG1yI/rq+46o90HW9ZfclsH+4v+fF7Qu2bcfDwz2QkeRJDnzj2F5uOO73JFb+ayjYLbIe2HMt06sgxBnkZ/+391KpjqrKzc0t+UxY6P/L9V1L+H8d+DNV/ZmI/F3gXytf/y+A/xkLoH8X+C/V3tn/IiI3IvKJ/lpveDudpqJsLiJnivGrsHINhnVq5XzKRVUXxSDRp2pLqwhJbQiUh1ywglrupaTk7FB8sTCudBGeBM9f8fZNLdw7nj+74d3rd3TbK9pNjwu+uD5aEHWNA8nmmT1PxGgPvBVMpCFlGgdpzCS1jqoUaGGOihePzAWbUPM+sjJakRzZtAHRns2PP7NF5h3H4YQXYdN03F5dMgwjru84nEa8yJNN60o2N+Wp8EENIxyKMn0dYKgSb5UG4ryn3WxIKS1jmXXTrVirKTadG+PVZo2II842fdR1HUMaSNkyiIpRnmOS9f9t2xYRmvW16rOtuGrlW9b1pGpWwY1rVgw2Jpz4NZiXkVBXsvmmN9+doxs5YM/I1WBV1lGtfo7DAMNpsW6uUn5zUkgzTiHNiY6WJIHctmb/Gww/zwuXORengTVIARYkSkdbkYU7K0VVZE0OahYtqDORk8tnz+hfvsQV+5f+w5b93pwtm6bh66++QLGszuVVSyLnvDQat22zqFoBT4LQkpyoiZoAbC8vQeywEWcQ0/MXLxjHiSZHnvctgUyaB
3LjSUnZ7HY8PNjoZynOC/vALuv2h0V6sWxDA9ayngVRa5BWTQewane/f2C72xXY56/e2n/d9V0D6L8D/Ffl9x+dBcWvgY/K7z8DfnH2bz4vX/s1AZSyKGQReCj67SYQ0DbLRqy413mXuJaEi1+Saski1q7t8mOoDzmbtQ1VyzICnhBDCXBCaGVRuvlVV0VOXMEBHcqm70yHM8Xitx1oGzOi815xYj+XWCc3TNzByoh1rDBNppcoZNrWESflNMz0bWC8H5bPt3ZnhTDF5TOLCH3bMA4jf/bzn9N7z2W34fpix+V2w8+++prbjz4yuTjW7K4GUe9WE7xxnM5m39dZ9/r9FswSUc+9l9YpMlUTgch16klkOfgMYzX8GTFDvCoKvJTKVOESCw6VkmaCwdMaLKqZoJiSUSw2G2nJgg073253nE6nZYR0OVxhwd2qolRWYxmM40BbqGG66ZhSJsSEzBGXlVzwP4FinJegTtKV9dNR8PqcCc4RVbibRtJyX01dahgGcwsdx6XrXF/DKtW8BIuYWZKFigWfV25VvLoZwTeCaz0R8E5wOC5vb7l69ow4J7788gtuLi7xIfDVm7eoVMk+2z+hQgywyNEt961k+UvmV4Ld7uqK/fHI1W63NB4dQpojLk34eaQlExRURlIUBnW4zQWfvXzJ6zffkBVur2/QMhVPqR7Eee73H7AKde2UpJxp2+rkmxe/qPqcRYTtbsd+v2e3tSD6//ssvIi0wL8F/Dff/jutNdN3uETkPxCRfyQi/2jKZq1QPbttEwBo4Y+tQWFtElXx27Mub8kycpl7XYF37L6XPzelo6rLxqlq6okYzUc8K4xTrGvyl676gXMFyktZUWNt1rqozd8nziPBKXnOTMOE5IzGTI4JTZYqxCkynmbG00iczejMVMhNCzSmiDrPOM0cjieOp4HH48Dj0SaR7h4O7A8Dc8w2KNA0XFxccfvsBfOcSTFz//6eN1++xiN89eVX9IUsXzHKp3jWWmY/9aN/6lUFLFWB+Uqdec+UF5inaclCK0Wp8jrrrHTbNmc/t2KYT8uxSkurmWP9+7Km6qMuJPnSea/YeHnt/f6BOmpph/HTz1tJ9JVJUMc9SdYwSkcr9dm05C4wC+RaribD1VsX2DYtnfNsfMMudGx9IKhpm3gRppTxm00JaDb+OM+TyROqlsPG3tv5AImt+TNO5Bn2p5zfP7sbqsokmVkgnpX6VVtBgdB4fvKTn/A4DMR5RgRS4XXa2WBK/MG7gsXa/qmutjV4qhq/9/LqmuM4chwnPvvxT4yVojU7tCpnlzJbZ0lCdEL2Hi+OSxHa4x53eE/feC42HSH4Zd0tz7k2uc4THNXluZc/Lu/3/EAHuL25XXji3zcL/S4Z6N8B/jdVfV3+/LqW5iLyCfBN+foXwI/P/t2PyteeXKr6D4F/CHDTPlMnsnjUGL5XBXIr7mEZZs1knPfktJaIVnqu5Pl6LQVAttMP74lxQEIDGlCKYEbKZDUs1Ecl+4bYBCagW+Tpnl5Zi1CImlK2iOCB3eWW41CMz0SQHNDc8PbNgXYT2G07SJZh+iL4fjhYt9ccRjwqBurHBJ23Be6ykmbABZpQ7ItlXRSHw4Gu7xmmidPxxPuHDzTZc4qRq4sLmtE2RlbBaaIPHgmBpuuYy3RRSgaN+KJipQo5JpzTwtFsCq1IV+dTV1gANYOtI59no7f1OdXf147+XPyQQtOsXkoYqF/HBOu/rQ6Y9uyfGg3q2euqrLy+8+zyNA5sN9vlYHOFeVCzp2WwAPusZgVjs/MpmirT9HgwEzXniCh0HdK0+JQYTwOJzMeffcbpOHC8v8fHRN+0JM1Erdm3N11QddB4mDMBG+AYTifarrfxyxKYvPfEOZr6k7f74ksAC01X1vmysaB8JqoTLYIkyKIkT1GkB0/xjpJVgu63f/d3EYH+wwM/+9nnJIw+5IM76yFY4KzUoaQwx2RQCvB4GhG/Z7u9YBwmchZub56T0mAH2
zzRzxOeZJWHa0yPNEMmk8UwTa/CTQNTCKZ+j5o2gPOE4AmNIz3tZdr+9ybiXBOsesjIGfau2THnmTnOZM10bfdL+/s3ub4LD/TfZS3fAf574O+V3/894L87+/q/V7rxfxu4//X45xr8vXNPQPNcuvB1zrwpBO+qNH0ufltfpzYM6smIlPl6ZdnYWspDqCepLm8kxcSUIKZaVusvPyHOwHyBcZ6p80EK9G2LaCZPc6F8KOoy2wvDmabCRc0oc4o4gTlOZE2klJdNX83B5lzI8tm0S8Pl7dJVPie6g5KyCY0chpEhKoeYLBCNI5dNQydC0ESbwSdou5aLy4uFe+icSdelGI3alWIRO1GyCimbC2Qqls/iXHGFlJXWlGwz1Ix/qR7y+tnq4dh3nblnFrUmV90UayVSfp/S2jCKBcKpr1fpS+fBupZs+ew91L+r/8Ze12xFqrCJngXvhQtag7l3NJue0LUkCjMER5xmjvPMxe0NEgJv3r7j4XRgdiB9yynNjCRTci9dfFCyl0LXsbVQ1349aGKMC0bvvLNqrATIOnRSN75VSnXnrAXpml1XuMiy5YQdpDXFXZtT9ufHwxF8w2azQ+RcIcruaS6Bufb7nbNR2BpkBeV4OBCc4xd/+Ze8u3+gbxraGGnijJcyslpxWmoWT3GgtWeWimyfBedMgxBMqYQ4J5zK8r5F1q7vOZF+rSzq/VjXR9u0HA6HJ7Yx3+X6jTJQEdkB/wbwH559+R8A/7WI/H3gZ8C/Xb7+PwD/JvBT4Aj8+3/t61MDn508oaiP++Cpc9uABc36wJ1bCNFLx7HerIojFvpS5SWuWQwmdCABFYrAroCzbFJKqdLUu3P2ENYHIngxnMd7z5QyUuCG4/GEpshx/0C32YITNs0GFQuKTesZ55lN1zCOkSSZi4uecZzpusB4skxlmqbFizyrlckPjwcuu5a1O15H27RQtZT7hwf6jXWCEXh+fclzHOnhETfMhMbz7nHP5BzvP+y5vNiBGCGZsiCr0HBoWobTcclaVJXgZGkGLATv0JJjUWt3Qp7jk+NZz57dOXWqimsAyyJ2Yh12h1vU389ZE7XsdGeZp/d+yWYX6pUYhm2jvHmZuKpcWVgzE+/dImRTD0yjStnBnrMyzJO9pjco6Xi0CaEcDSZ4uH+woJ+UXhwpZmZNy/vscLjMkgk9FkgnaFljcXWQXZwDiiRcebNU1kiaJrIqm+2WgmXUO21HgJ5BEghZ3QJNFRr7GjSVBTaor/PJZ5+wubjh66/f4ENLjicoDURxvjxnt2Tza/KidrBHG1oZh5PBE9pxfP+OJiUcGcmJtChMnUUCUVBZaEdRA3OqpEPboxYsHW/evkOlQViDXw2m1etooVSdHeTGilmD/eXF5a9Wi/oNrt8ogKrqAXj+ra+9w7ry3/5eBf6j7/ImVvjBspg5zUVUYp1KqP9PBa+rG8uaAPWEWbUX18VjR5yUbiWFZBtTREIui60uOpNR6zpH20LKEcThxXF/d8ftze2CddZOvhMx9XogYpX58WS+PY7MMBxxbUvwF8QY6bpQhJntE2+6jnnONB40uNJdNP+nftODQk72oIfpxPX1jnj3Zrlf1QQNrHkhApoim4sdL25uUFEu2o54HLj8+CPm/YH44QM7F3ivoLTk7Gi7nmkYFgwtl0PicJoQ35TM0S3ZmhcHLhS6jSdpNim+ggPaMMLT5/JtyohUYZK2XQS0U0q4YE2sXJxMNecnJfl5IIWqwOXMt6gcqudXLNmriL3uQvbP+Sxm6JNs1QzKKmnbJrvNR7wcIj4gnf3suawDxTLl4GHsAklN87NTofctORh+mHImamZ3e83pzbvyWa1EresaLWyTNFlTVRXE4CanMCWjuYUy97/so/IfXZpKJZBmsTVXBKNxUixhlqTtSTbqgJvrDX/xFxNdWwza1GCOpmsLwZ4VPlGzFbd12FB8DUAUyUqnGTdPGAXeGlg1a1bWIm9pMGaYYwQSDiWo+TKNQ
diIJ0Qh5AlIaKHL1Ow558zl1eXyrJcPxZqRu/Khq4vrdruFd3zn64cxiaRnJQclA3SGwbWhZY4zgYaK7VjGqjRtWDLKSmVyZ9zDkpYWfKso3GS74eIimgbQDVnykiGJDxxPiZutJxBIEe7ef2nd7+MdAiWzM8w1I7j+gllhmhKn/QNpHFDxtNudZSdWn9gEjfNoEhrnSElBE77YJXetJyXwm8C4T7SUzUrk8Thw1W/J08BhPNDWPEKtXMop0YaGlDOffPwxrXf0mw0P79+RQ2Bze0VSaH7yKfLpc/L/8WdM45GffPYJDx8eER9RBuuQitJtL/j400959/oNWRP7hwfSPLLd7og503Zb+u0GxVSYQvBsek+KmT//sz8DhBxHww59IKZiB1ww7gq1GL53ZreRIUabq768vFpLXK2Za81aMnVqxuAFs8eo4iiVC+wK3JNLhi7O0SxMjupGampRtTm5QzbiAAAfVklEQVR5XuYbdMAZLYvSgLHmYGUYuHpYhNL8SZHsBNd3xOB5VOicVTzaeK52F3zxxeemkp8S3gsZQVMyJaQyBqtjwkuoNW6ZqPGkeTQ5w2BNqNo8WVRPACuKHZqd/dwMOUIWwaiumVQEpVsgLFlsacoCwQnemcYsapYsMRUObDYaXSp7UpPBFLhKNQv0DrpkDT2coCIFQqgNQtMNFSprwfBhm6qCi/KWvFjm7aOaFOSsXO06NCf+9H5mahtiCPgzPY1UtHJhPXgrXm/oRYEAiwLW97l+GAGUVZququpYNiULTjHPcaHSVIOxpRQsG2Whq1R6E6UbTiXUWoCtdKGH/Z7dRU9VXvTegWacNsyjkiXxi7/8GbeXgZSsrPBOqDPWVXnmeDwQ8QzDzM9/9gsoZZhmxYnn4f4eccL2amdjmvNE17dcdj3j5Ng/HjicBlJMPHvxkm2b8W2Z8VfHQEO4aDgdjuj4SKMzKrah+367ZOVt0zIOJ073D/RdW7rcga7v0DlCzDy8+QVuTpwuGtR3jINJt/lSKuaUCV3Hy5cv+dmf/7mpjmeTzUtxNkEK55nH94ynxug+CFqsgJvg+fSTT9jtLvjH//j/BGqZXJ+0XRUDTXk1aasMi3odDoeFolMPzorDLWN4umKHsDIx7GewVCd1U83F5bNtGxtOWLQgdVkjZlRn1LYadFc1eTsEnKxULGtcmWSglsxREDb9xoYP1GCi0HdIzCSBd+/e4hcFrHKXSjqcciYnWNSQ6/sX47PmvE5xrSwToYqMLFwQUYuaWpTiM7ikxRmV0jzShXh/ngnW63d/98d8/sXX9H2PdxscnikO1ljyhcNMabiJLph1cEBMuBzRxGJ8uK4CNVV85w1HlUAjednLy+QVLNbaAmRXxXSkZMSe337W8s1x4jErc4aL612BPex51iZkvc8rI+Osmvme1w8kgLI0dUTMHK3rulULlHUKoXZgxdXyQ5byw+k6LVEbCxU3Wk6lmNGcQJR5HgsMXstMYdM43r/9hvsv3xE8bLc9ZCUlxbkE2MP03nAxNNO5hjQc+frz12VM0jyyD49H+q7HOceHd29J0wlCU3CiyHAc8KElReX26hlN55lnOGXP1kOTRnbpRO8TSYW5b/ji7t6sOmIdg/RLcHJ+KPxFJQ0jfXBc7XbEeWKnnrsvX3PlWh5j5OccGdS0UHNKiBhfzrcdn/7kJ/zFn/3Uup7ZZohzMtWraZrLvUxMY6QJLZoiOc70TUPWzNtv3vCL4XM+/fFPiNPMu7ff4NI5Xr3Sk1Dr5ralu3/+PYv7gK70nPrM62G5LJqyNioXeG0q2cZpGsvO53mikWAk7qYhSJ2EWvUSxnGkaUxyb54nK0nrdFvpKWrJaBbtAc2Luds8jbSN3Vtf2CGqyuHdHS2e61fP2Q8HfBMYTgNd55dDRnwwJwPHosSUsnBx9QoVt/CV633KWQt7fA0CNpbplgBktKxgGWyC5MrPaurEVSW9C3Uwul6bTcPhcU+/7RCfFwWtsulKP
wHwJZtUxWclaMbHAckJ9QHNxSmzHncijOrIWg68NJG8o218CfqFUVN4x5TehJTPGdWamnbwJa76DV4CH8YTu505sdahiKqVYXvYPbGGeQIJfY/rBxNAq9dOionLq6t1akCedtqDGI2lujDmksGklGiCiT/UDVSVac4bLSmrWdFKpmsDKY64pjUhEc0cHu6Ypre4gq3ENDNOLQ1xCdbiHa5sTlWQHLnoAzcXHfcPkRHTw/QyEOeBttugWdnfP5IdfPzZJ4Tg+MVf/hwUuq7l9Rdl9kCElgkRX0B/4Sd/8LfoW6F1MMTXBWQXqgdRLtJqTGUyxVnx5t0FfdPy+udfwBhpVLj3I3eNJ7iO3/7kMz68f892s2WaDswp8aNPf8zD/hFRC25RTaR407U8Pu5LliLgO8Nf257T4WiBZzbjNcUCy5vXr3n+8hXTnNg2a+MGeBIonZiKvixrYbUeSSmV8nsNGudBeMW6YZ6mtalRy3fsPs3zTGiaRdqwDl7UzaW6TmR5b4pe3jm0VETVchvMrA5dZ+8F40xWuMhk/iLTPHMaTjRNa8LXWZl15ut3b0iNI44TqjBO09I5vt50NK5hzMo4mfBzzCbuouRSJdWJMLOynvO8IIuUNboKjNfGU53GsTVrbgLgg9X2Ko65ViL1GQGqwu///r/AH/3R/8rLFx3XNx3v76qaVcG1HTBHeqm224or78+4praunz4vZdZAyvb8d13DQGNapTmZb1Nl5WiBWKA0bo1NMJb98aCBaZh4/tENN3nzJBiKVOinBlFjm1SopsaG88rnu1w/mAC6TjAYf1KcQ7LN4ZqGp3VksxoOFsSU1MVWw9KFrRunNgTqKFy1fqj+VqbUk9nvH7h5/hJxcDwe8ZJwboP3impkirOVaxFysKAmouQzrqkTm2n/+KNnPB4OeArWFgwMFye0XU+aIzqPvP7FF8Q40jiH7wJpGghiiybnjHobMRS1Lfn2L//YsomsjKeTkdG1DBE4KVBBzcQtuw3e8c2bt+y/eU8oExxJhINzhH7L8x99hMwlu8ymfD4OJ3xoGPYfSPNssmeh4cWLl9x/uKMSr5umZTjsjSuakqnaq+PVx8/p2oY3r7/hNAw03vH669fc3D7j+OH9Uv5W0exftWjXr9nC9s4ykkpjqs9WoDxblpFJRPAFyqkZ6zldbRgGfCjOn39F1mEZi723qkQUo2W1VbkK1s66qhpJ/FsfxQKpvdc5RuY4W/e3aVAvENOZfY3Ztkwp0rUCpYF6eXPN6f4D+8ehjIxa1eOcspb89cgqP7UEwzJfWgJ8zVZBxHDmeSr6At6apJRnq98KKCqRvg/cXF/TByWeJpQKeRRYxRdthxRBjVXimxaVUPoLRVzn/N4gdEROBJI6DtGh3pHTTCelgixTW1LKdbtPgTEmZhwpNEgIPGsDk0jhrPry/qtbr32elGqVkReBm3+mSnjUOmO+aW20rWSgVTnaHq5b8CRYH/I0T8ZBdFLsLUyKzRSd1smlKhbhnTBHR9/2HB/fo+k5iCvTPvZ4U3DWKczKFGerKERQDbROlhPWOcccDSNsvfA3f+dH/F9/8hVIoOl35OFEHA8gAd92tlNSNLFd70jDvHSRU8kkXTIFmVBESLzfMA4D277h5urWIIiSWdVOds6ZcZytEZEzjTiC3Uh6H2g3Ww4x8slPPuPZ82d88+YdzjfsNp45j8QJNFkD6bR/QNqem5cv+erLL/j6889NsCJbmd8GYazd/u2Gh4cTPnjeff0lLgREHNtNT0qQppGrq2uOH95BwbJcEbzNKS1d/YK2kFNcG4qqTDovv1+oT1q6zLA2i8qfa1A9d7cUrDM8FQfTUDRA1wbT+ixrllwxtNroMK3JqcSoIn3oHFET2aspM9Wgo1jA0CV8LQEhCZCUNNu0nDiTUvTOxFbSlGm9pyGTppF20/PR5Q2nwehSIbgyZGH8yKRDqVZqliXlJyriysQdiiQb3s8a8cEznBLzrLiLlr7Fx
imltMKkZn2AehNMmWdOIsxk0KoPuzadXNcT40Qes9nQjHYYOCmNLJMnKd9d/11m1xicpuJIcf5/2zu3GEmus47/vnPq0t0zszuzO7ubXXtZ2wKBgkAkykMiEEJcQhQheMlDIiTC7QVeuDygRDzxCEIIkBAJ4iKEIARCBFEkFEHIsyERkJg4TuzYsR17Y3t3dmemp7tu5+PhO6eqZyHBM9iZ2VH9V6Pprq6drlPnnK++6/+jCrB0BXXb4SUwcyk45gl5zu35AuczcJ4882SZEGKeqkuxfxnyPIc5idZOTyBiC64vmLifTXhFEe9o2tZ6cAftTRXbHLFGPGoHKTUJkl9MwBHPC73QTUnbVmZo6T7eeyvNE98HmKrlgnL9HHSmkalk8W8bwUPdLCG05K3lv3nv8DJEhC3Cr/G6PWtrUxatRrb4nOV8QV7A8sBaAbdqC6bpOjKXURSxaiJzK0n90e8bOqq6QbwnKybs7u8jaP/d3nuyvGTv7h3W188zPzig7QJt3VDmJRvTDS5unEO2zyOu4/LGFqKwMVvnznweCR4cy9198ztJoFPY3r7E/v5ezyK+rBa0IZD5nCDW+SZoMD+UczSx8MBrS16Y3+z85kVaLJDXNA2rXGRdm3ow6RB0i0IyaZervtDVnM/eb5U2SPpZXVO6oiGRaulTtZqVuSaS7dQsL3UGFUmpUT4GipL7xtaZF/NZdhoiJdqwSYHeZzesbyx/MWaIZM5zbmODEPM+nbPGe4KwWFS42YQc07oQT920iJSIV+rWBKDE9r6oDtpwXwwg0ZRf8R2LWhReAplYbnRTdywPlDI3oo9F1VKWFtRxksZiQnVzc5vd+S7qjdchdW+154GCd6gUuDy36Pf8gGXkDRUHPgl2tVQwIP5/cyE4CWTSkeeOIIKqpZwRKnCeRhx39xbgrV1OamsuYlwSSWimvUNcK6tlxf1DASVdxGp9/HFwKgQoQdHQId4WaeZWKkuSiZIihclsWwkMJW1CVa05mB96RhtRiPVGUQ22UEVoItuQamB//w75LNK7qZEdWGTVoSJM17eoD+4yX1S0Uf0nQJYn18DA1o4KVy6e45kXb+FdaUGQchKvN1jUNy+ZTiZkHuraou8qHsWRFT762S1Jpw0d59eM/EIR8sKoy0IwXkxFWTYtWVH2VULiM7KipKkbblYHTC5f4MXHv0jQwHNFzqUL21x/+DoH9ZJ2WVkHz0WF846dnVtc3N4mK3J2nt9BuwZxpjFev/4t1K2VcW5dvNjP0eXLV3Des7u7y52dHYv6Azs7O1y6eg0njjwvhuCRWjS57drBTAcQC7o0bUx27zqEwaTufZt91MxFLW4o49V4fh9USUFFZ10bxTnapjahCCsba9hgtqayKFRXtBgU7QY3Qt22SUQdMuHjfo7a3NDDHFVcUHLv6JqWrml6IZ4eGLPpBO8zi3RnOS6fUjedEdsksmQ1PodFEvrOtP7UTNCEkgk2nxUxf3WQ6E3bREJqWFY1+SLj3JrHl946egrkEol0ghFll96KAHCWqdLXj4sJkba2Ut6A4LMcPZ/TLRfQNORtIPh4YxgeMoLtPycgQWnFkwkUYh0+NSghK9hdViwDqM+GRnDByqsdjk5Z6WGm9IUHrPrao6gJh2XJ6gP5ODgVAlQA7Try6ST6MCzd9hBvpJNDCdDJXypYy46mbsxPGqtNUq9y4wxNTxgz84k+njYSUoS26bsIWvqRaSlJoJeTGUXp2L11C2lbfO3ik9VMpjw/nFKzNnGcm2Y0QZjPF7Z5YgJ0CNB0HWvTdaC1xRY5Kq0dR9anSYXQkeeZpe2kSiBisQCxp3usXbeSQ+s/XhQFTVMZAcOkZLa5iX/uBUqstn/nzg53PnObwjtK7ynKHFmbkWU5ZV5y+5VXuHTlCtceeJC93TsUxYT9vT329vZYRgbz1O7YGsNZECSxG21tXaRuOtqmoWkaJpNJX3eeUoCatiXPMlIGsGD3Ji3wngwC+rlMAYVh4
cgQHY+H3D0boq9z9x6J85pKQLMs7yvU0tpKlUDWATT5yVy/1hJfqSorDQnprzUJ5f7fSv5O4Tyhg9BY3mHSbIuiAFxsX9Jye3mXvChBoNF9rl37FoJmtMFyhgkhPnQk1ujb9zqkt6qs/NSaJHZK734hrpuyzAmd0HaO+byhLEwgzudziszjMfLk6WzKtIwpdV1LJ45MpPffpjbiaT8mE1nFI8UUfGbkJF1DLNKNlU+uV5ASx0EyqTW2zFPv2ZkvCc7TCWQkt0J012U56rKeEHkIrOqhOYX0PE3C1QStfXc4vKaOiFMiQJUSa0oWktrdRbMkLct401ZVbh/LCV2I1UdBcbmjrhu6djDh0wR33eB8HxpgKY5AaGpcVti35bn1VoosPB5BsgkXrryB2y9/DamWtF3LLFjLVkXxmSfHFoBDePDyeXZ2G8Jyn2Wr4KYEKch9oMi8aSHqcD7H5y2hrQldQ5F7BHtIWKqFYvLXhGZd13EBWrmfzyx4trm5yc2bNymnM2Mzz3O6tkEXC5743OdAldZ5pAbNAtNO+Y4HHmbn5Ve4uVhQi2fz8nleuvUK165do6lq7ty5y/aly9x64XkIXUz/aKBTphsT9vcruk7JvOCzwlpAtB2vvPwybVNz7vwWWVYyKQt8Flv6Yqz2XVX1gmw1IEP066aAUDK77005SQIy+TwHLdGEaBcsih7i+6Zpeg5TEJzLqerGqsL6iHIUuKnXUyTdDShO7RpbVQ7q6vACTsruoUPal79m4vA4QrPSbgasikg15i0rmc/RkDOZeluXziNamgDE2hB3XWudKF2GJ1AtD/DFrL8Xro8Z2PW0dYXLS9Dh/vUWlM/Q0MRc5ZqyKDi/tQYoJZDpjL265fbuATtNTfBiJdDuf3YVAGJVnCDiLYVVPOocDY6smFItDvChpRDzGas4AgERJVdhJoEmOFo/YXe5pA0tQRwSkmCNOdjOk2cF4vI+NcncfYNiBUN/r2S9JO+rhVRW/Ld+yGo4Ko5CJvK6om1a6qW1s22qenD0YoumaZv4lHE9xR2kOuchqDSfz4EUZU8Ta03nVhuMpQWQbmTb1MNTEJiUBdPplC4EDpYL6rohzwu2L19BsXYTy2VFtbRqm9ATe5hW4sRxbr3koRtX2d6aUUhF6SqKLMP5nBaPZAW59xYocmLtXyMpQ57nMXnZ95pWiDmZ9hjuKHKLWmrXcfOFFwiqrM3WzM/aGqmIxmBDcmXUaqbjhdk6ru5YNNZWuG4azm9usr6+wdraFIkaZdu2XHvwOiqOxXJpY/MecQVBPU0rVI1SNw1ZXpCXRc/H6nzyIXY9ATYIdVX3XT5TCWSa757FPm5K853KocZvq4GBXgWMr1PmRXIVEFdBCIG2aYwxP2qRQKRJo/97Gs1ATVqOkz5iH1AOFgf9GtL0vau4J2jhIxGO6JDkb3yrbkXwhJi07/HeRQb4xGtZYpvc09fFr/h862oQ5qmFTVI4SCb+/7Lfqqqhqiu6ru0Jh1WV/b0GjSxjCqyVma1LMX+ojzXkXRdoYuEAIrEEuuw1+IFy0LqMNgTy2RSZlCyBRbx1EsB1QlDhAEfjcyoN1Bo5VuMelXhrxXuKSdnzPPR+dDTuvcEsT0JxVcMcyr6He6G6KlCPhtMhQNOTX4nF//Y0aWNaUtt2hG4gRtbIBpRM11S+Z50wpc/Z66KZ2cV+LuZvaiNr+jImXdsl1NWS0EZTtGlZHCzwWcbWhQtMZzPywhjcgzom65sEFZo2UNUNi4OKatlQ1S1N0/War8f6ZW+fm3Lj8gZXz3lY7tBVezhRcgcZLV29YJJnFlX0lppk7gjbOF3s5dP2VHEBdY4iVhvVdYe4gtl0jTs7d8yNsL7BjUe+le/8ru+mUytra9oaQsfMeaTreOqZLzPXWHLoPXXbsDadslgsWVtbtw6JTnBFSRusFcJkOqVqGzKfkcViBQGrFPOeyWTKlTdcAYT5/hxBrKtk38Mo9qmKDzRr+ZCEH31lk
JKazJn22EVmooR+ucfNNTxuD/s107kp2LTa0jj5OHufZUp5CuYvT+tMI3PWsq776L+d+PVNP59n5FlG5jxidai9Pzc9TIYKGcsSsZ5BsSOnJAE6gUMtUCxNJ8sL2q6jrha9ayNp36lsWWLuY+gsPzeP5aG2B+zYdDqJrP5t5ML1VEsjr0Gsfff6JOeBq1ctkT2y32eZ9SPL8zzmYbe9wGyiMjK4UUyI1qGjEQfTNXS6TuU9rTpaMqos4yCfMSfjzsESSILTAnfirLHdZG0Nl+UWiEUHysWQilsOpyYNvwdhM1QlDVWF93UUHrDF3Q2tZM3UTgt74JxUXQ7/JYr/VD5WFEXUeBKbue9/i0Dhi+ins5QW6x+f0WlHXS+ZNDMkBjS6rmN3d5f19TUmZQEOpr5EgzLbOAfastifo20AafuAQfAdGjLEJRNJySQDOqZ5xkNv2GS+qJnXe7SNUtUdRZHHha0Y8143aJ6xakKjVuGco8P8Wm1rLXFNQ8u4cGErauDCwWJJ1zSsz8o+qiHOM3Hmcnhp7y7ZtGTe1kheMFtbQwWmkxKX52RiPuGmaWi7lhs3bvD8s8+yvjbDOaGpDiA0eKwWvA2Wr7m3u898f5/N8+fY3T8wzs+mxjSEjiwryHzsy53CsQyZjBCDODGfc1WLSD7GewOJicU/CdP+78QAU1CN4zESkLqpzbyPGnCnAWv3pb2Pudf2oee+7JV/jf7OlY26eu15lvWuH4dDtTO6unid0gc4h82eylqLIo/N/NSIZ5zVBkXxiGnpGRo11i61Mkls/TpUa4WYVdC1DS6TXiNN9ynR9HnnmKyVNHVrLqTIGqY+us4QXnzxRay/UsopTbSERMFq85iIvLuuHYKDAF2g9Hk/pyGAxjzOAATnqJYtXVPjcTEHmqiBmqCcTaYsqoqynMQ5sBWxGvxLe7fnEY5zYr5xO2/wkQ/ZLKvW6VFwOgSoYPmDed4/pU0jSZvHTrKFtsLqIxoFjf2f5PcEMzekNzsSm7X2PXmsVC/2kW87QtdaaWeW9Sajho753h6LfaGY5CsbMnbzjM70rqpZVhVFVZFnniL3lEVOKLroLLdrDQQkwEZplRcHneP2XhUfDPT8mkLoN5yIoyhKM79j4MV7h9OWBy5dxHGevf0DFpUx3ezv3cW5jK2LF7l+7TJPP/0Mly5tc+v2LZqm4aCzpPA8E7pqSb6xTu4LLl26xN7BnEsXLtKqsj+Pwq9uCKo8+5Wv4DVEk6XjYFmhzmrnN2Zr7O/tol1H1zaU04z9vV2C2lzt7e32D7GmGTTJ1WTmEAMjyexO1UeJ4m7wY61sLAYhmsSYHvrbJrBSYMk+D7HH09KqfKL5mngVrJPBoLWGyDLVNGbd9H15VA9/pwzRfoAMRyYOgtJipqcxtQ2CTKKmLSKRV9V8l2ZpWXJ7SiOyAIuLnWODtReOY29bq7Lq7018Jf2DwHKnU0CuF4AIrg3gA1meUxQe5zWSNndWJRSvsa4bNHR0hHuEU0CDxL1kVpP5td2hh1/vahOJAcfGqqiwHO92aRrsZFpSLxfEMDKFMzLnS5e3efyLT3Hh4ja7d3c5d+5c8qHE+2CKU6KuXF0DEpWBQZYkgZkE/+E7dxTI/ycC9VpBRPaAJ076Ol5nbAOvnPRFvI446+ODcYxnAd9ofDdU9dJR/tjp0EDhCVV9y0lfxOsJEfn0WR7jWR8fjGM8C3itx3c6gkgjRowYcR9iFKAjRowYcUycFgH6Ryd9Ad8EnPUxnvXxwTjGs4DXdHynIog0YsSIEfcjTosGOmLEiBH3HU5cgIrIO0TkCRF5UkTed9LXcxyIyHUR+ZSIfF5E/ktEfikevyAi/yQiX4q/t+JxEZHfj2P+rIi8+WRH8OohIl5E/l1EPh7fPywij8axfFhEini8jO+fjJ8/dJLX/WogIpsi8hER+YKIPC4ibztrcygivxLX6GMi8iERmdzvcygifyoiL4nIYyvHjjxvIvLeeP6XROS9r
+rL+/rfE/gBPPAU8AhQAP8JvPEkr+mY47gKvDm+3gC+CLwR+C3gffH4+4DfjK/fCfwjlr37VuDRkx7DEcb6q8BfAR+P7/8GeHd8/QHgF+LrXwQ+EF+/G/jwSV/7qxjbnwM/H18XwOZZmkPgAeBpYLoydz99v88h8P3Am4HHVo4dad6AC8CX4++t+Hrr//zuEx7424BPrLx/P/D+k56Q12Bc/wD8CFYccDUeu4rluwJ8EHjPyvn9eaf5B3gQ+CTwg8DH4yJ8BcjunU/gE8Db4ussnicnPYZvMLbzUbjIPcfPzBxGAfpcFBJZnMMfPQtzCDx0jwA90rwB7wE+uHL80Hlf7+ekTfg0oQnPx2P3LaKZ8ybgUeCKqr4YP7oJXImv79dx/y7wa8RqRuAicEdVU7e41XH0Y4yf343nn1Y8DLwM/Fl0UfyxiKxxhuZQVb8K/DbwLPAiNief4ezM4SqOOm/Hms+TFqBnCiKyDvwd8Muqurv6mdpj7b5NeRCRHwNeUtXPnPS1vE7IMDPwD1X1TcAcM/16nIE53AJ+AntYXAPWgHec6EV9E/B6zttJC9CvAtdX3j8Yj913EJEcE55/qaofjYe/JiJX4+dXgZfi8ftx3N8L/LiIPAP8NWbG/x6wKSKpJHh1HP0Y4+fngVvfzAs+Ip4HnlfVR+P7j2AC9SzN4Q8DT6vqy6raAB/F5vWszOEqjjpvx5rPkxag/wZ8W4wCFpij+mMnfE1HhogI8CfA46r6OysffQxI0bz3Yr7RdPynYkTwrcDdFXPjVEJV36+qD6rqQ9g8/Yuq/iTwKeBd8bR7x5jG/q54/qnV3lT1JvCciHx7PPRDwOc5Q3OIme5vFZFZXLNpjGdiDu/BUeftE8DbRWQraupvj8e+MU6B8/edWNT6KeDXT/p6jjmG78NMhM8C/xF/3on5iz4JfAn4Z+BCPF+AP4hj/hzwlpMewxHH+wMMUfhHgH8FngT+Fijj8Ul8/2T8/JGTvu5XMa7vAT4d5/HvsWjsmZpD4DeALwCPAX8BlPf7HAIfwny6DWZJ/Nxx5g342TjWJ4GfeTXfPVYijRgxYsQxcdIm/IgRI0bctxgF6IgRI0YcE6MAHTFixIhjYhSgI0aMGHFMjAJ0xIgRI46JUYCOGDFixDExCtARI0aMOCZGATpixIgRx8R/A1DqOUw8wTEEAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.imshow(img)\n", + "plt.imshow(density_map, alpha=0.75)\n", + "plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "dldiy", + "language": "python", + "name": "dldiy" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.2" + }, + "vscode": { + "interpreter": { + "hash": "b5b25aaea419aac5c36814101d617d5039a3d8bc965da8b9de2287350d0b87c2" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/datasets/FUDAN/preapre_FUDAN.m b/datasets/FUDAN/preapre_FUDAN.m new file mode 100644 index 0000000..7da4cb8 --- /dev/null +++ b/datasets/FUDAN/preapre_FUDAN.m @@ -0,0 +1,54 @@ +clc; clear all; +dataset = 'B'; +standard_size = [768,1024]; + +att = 'test'; + +dataset_name = ['shanghaitech_part_' dataset]; +path = ['../data/ShanghaiTech_Crowd_Detecting/part_' dataset '_final/' att '_data/images/']; +output_path = '../data/768x1024RGB-k15-s4/'; +train_path_img = strcat(output_path, dataset_name,'/', att, '/img/'); +train_path_den = strcat(output_path, dataset_name,'/', att, '/den/'); + +gt_path = ['../data/ShanghaiTech_Crowd_Detecting/part_' dataset '_final/' att '_data/ground_truth/']; + +mkdir(output_path) +mkdir(train_path_img); +mkdir(train_path_den); + +if (dataset == 'A') + num_images = 300; +else + num_images = 400; +end + +for idx = 1:num_images + i = idx; + if (mod(idx,10)==0) + fprintf(1,'Processing %3d/%d files\n', idx, num_images); + end + load(strcat(gt_path, 'GT_IMG_',num2str(i),'.mat')) ; + input_img_name = strcat(path,'IMG_',num2str(i),'.jpg'); + im = imread(input_img_name); + [h, w, c] = size(im); + 
annPoints = image_info{1}.location; + + + rate = standard_size(1)/h; + rate_w = w*rate; + if rate_w>standard_size(2) + rate = standard_size(2)/w; + end + rate_h = double(int16(h*rate))/h; + rate_w = double(int16(w*rate))/w; + im = imresize(im,[int16(h*rate),int16(w*rate)]); + annPoints(:,1) = annPoints(:,1)*double(rate_w); + annPoints(:,2) = annPoints(:,2)*double(rate_h); + + im_density = get_density_map_gaussian(im,annPoints,15,4); + im_density = im_density(:,:,1); + + imwrite(im, [train_path_img num2str(idx) '.jpg']); + csvwrite([train_path_den num2str(idx) '.csv'], im_density); +end + diff --git a/datasets/FUDAN/setting.py b/datasets/FUDAN/setting.py new file mode 100644 index 0000000..05a5c8f --- /dev/null +++ b/datasets/FUDAN/setting.py @@ -0,0 +1,22 @@ +from easydict import EasyDict as edict + +# init +__C_FUDAN = edict() + +cfg_data = __C_FUDAN + +__C_FUDAN.STD_SIZE = (768,1024) +__C_FUDAN.TRAIN_SIZE = (576,768) +__C_FUDAN.DATA_PATH = '../ProcessedData/Fudan-UCC' + +__C_FUDAN.MEAN_STD = ([0.452016860247, 0.447249650955, 0.431981861591],[0.23242045939, 0.224925786257, 0.221840232611]) + +__C_FUDAN.LABEL_FACTOR = 1 +__C_FUDAN.LOG_PARA = 100. 
+ +__C_FUDAN.RESUME_MODEL = ''#model path +__C_FUDAN.TRAIN_BATCH_SIZE = 6 #imgs + +__C_FUDAN.VAL_BATCH_SIZE = 6 # + + diff --git a/datasets/GCC/GCC.py b/datasets/GCC/GCC.py index 0fe42e4..eef4a2c 100644 --- a/datasets/GCC/GCC.py +++ b/datasets/GCC/GCC.py @@ -8,10 +8,18 @@ import pandas as pd -from setting import cfg_data +from datasets.GCC.setting import cfg_data + class GCC(data.Dataset): - def __init__(self, list_file, mode, main_transform=None, img_transform=None, gt_transform=None): + def __init__( + self, + list_file, + mode, + main_transform=None, + img_transform=None, + gt_transform=None, + ): self.crowd_level = [] self.time = [] @@ -19,10 +27,10 @@ def __init__(self, list_file, mode, main_transform=None, img_transform=None, gt_ self.file_folder = [] self.file_name = [] self.gt_cnt = [] - + with open(list_file) as f: lines = f.readlines() - + for line in lines: splited = line.strip().split() @@ -34,56 +42,70 @@ def __init__(self, list_file, mode, main_transform=None, img_transform=None, gt_ self.gt_cnt.append(int(splited[5])) self.mode = mode - self.main_transform = main_transform + self.main_transform = main_transform self.img_transform = img_transform self.gt_transform = gt_transform - self.num_samples = len(lines) - - + self.num_samples = len(lines) + def __getitem__(self, index): img, den = self.read_image_and_gt(index) - + if self.main_transform is not None: - img, den = self.main_transform(img,den) + img, den = self.main_transform(img, den) if self.img_transform is not None: img = self.img_transform(img) - - # den = torch.from_numpy(np.array(den, dtype=np.float32)) + # den = torch.from_numpy(np.array(den, dtype=np.float32)) if self.gt_transform is not None: den = self.gt_transform(den) - - if self.mode == 'train': - return img, den, - elif self.mode == 'test': - attributes_pt = torch.from_numpy(np.array([int(self.crowd_level[index]),int(self.time[index]),int(self.weather[index])])) + + if self.mode == "train": + return ( + img, + den, + ) + elif 
self.mode == "test": + attributes_pt = torch.from_numpy( + np.array( + [ + int(self.crowd_level[index]), + int(self.time[index]), + int(self.weather[index]), + ] + ) + ) return img, den, attributes_pt else: - print('invalid data mode!!!') + print("invalid data mode!!!") def __len__(self): return self.num_samples - def read_image_and_gt(self,index): + def read_image_and_gt(self, index): - img_path = os.path.join(cfg_data.DATA_PATH+self.file_folder[index], 'pngs_544_960', self.file_name[index]+'.png') + img_path = os.path.join( + cfg_data.DATA_PATH + self.file_folder[index], + "pngs_544_960", + self.file_name[index] + ".png", + ) - den_map_path = os.path.join(cfg_data.DATA_PATH+self.file_folder[index], 'csv_den_maps_' + cfg_data.DATA_GT +'_544_960', self.file_name[index]+'.csv') + den_map_path = os.path.join( + cfg_data.DATA_PATH + self.file_folder[index], + "csv_den_maps_" + cfg_data.DATA_GT + "_544_960", + self.file_name[index] + ".csv", + ) img = Image.open(img_path) - den_map = pd.read_csv(den_map_path, sep=',',header=None).values + den_map = pd.read_csv(den_map_path, sep=",", header=None).values - # den_map = sio.loadmat(den_map_path)['den_map'] + # den_map = sio.loadmat(den_map_path)['den_map'] den_map = den_map.astype(np.float32, copy=False) den_map = Image.fromarray(den_map) - - return img, den_map + return img, den_map def get_num_samples(self): - return self.num_samples - - \ No newline at end of file + return self.num_samples diff --git a/datasets/GCC/loading_data.py b/datasets/GCC/loading_data.py index 2ccf054..1a8ec5b 100644 --- a/datasets/GCC/loading_data.py +++ b/datasets/GCC/loading_data.py @@ -1,47 +1,69 @@ import torchvision.transforms as standard_transforms from torch.utils.data import DataLoader import misc.transforms as own_transforms -from .GCC import GCC -from .setting import cfg_data +from datasets.GCC.GCC import GCC +from datasets.GCC.setting import cfg_data as default_cfg_data import torch import random - -def loading_data(): +def 
loading_data(cfg_data): mean_std = cfg_data.MEAN_STD log_para = cfg_data.LOG_PARA - train_main_transform = own_transforms.Compose([ - # own_transforms.RandomCrop(cfg_data.TRAIN_SIZE), - own_transforms.RandomHorizontallyFlip() - ]) - img_transform = standard_transforms.Compose([ - standard_transforms.ToTensor(), - standard_transforms.Normalize(*mean_std) - ]) - gt_transform = standard_transforms.Compose([ - own_transforms.LabelNormalize(log_para) - ]) - restore_transform = standard_transforms.Compose([ - own_transforms.DeNormalize(*mean_std), - standard_transforms.ToPILImage() - ]) - - if cfg_data.VAL_MODE=='rd': - test_list = 'test_list.txt' - train_list = 'train_list.txt' - elif cfg_data.VAL_MODE=='cc': - test_list = 'cross_camera_test_list.txt' - train_list = 'cross_camera_train_list.txt' - elif cfg_data.VAL_MODE=='cl': - test_list = 'cross_location_test_list.txt' - train_list = 'cross_location_train_list.txt' + train_main_transform = own_transforms.Compose( + [ + # own_transforms.RandomCrop(cfg_data.TRAIN_SIZE), + own_transforms.RandomHorizontallyFlip() + ] + ) + img_transform = standard_transforms.Compose( + [standard_transforms.ToTensor(), standard_transforms.Normalize(*mean_std)] + ) + gt_transform = standard_transforms.Compose( + [own_transforms.LabelNormalize(log_para)] + ) + restore_transform = standard_transforms.Compose( + [own_transforms.DeNormalize(*mean_std), standard_transforms.ToPILImage()] + ) + if cfg_data.VAL_MODE == "rd": + test_list = "test_list.txt" + train_list = "train_list.txt" + elif cfg_data.VAL_MODE == "cc": + test_list = "cross_camera_test_list.txt" + train_list = "cross_camera_train_list.txt" + elif cfg_data.VAL_MODE == "cl": + test_list = "cross_location_test_list.txt" + train_list = "cross_location_train_list.txt" - train_set = GCC(cfg_data.DATA_PATH+'/txt_list/' + train_list, 'train',main_transform=train_main_transform, img_transform=img_transform, gt_transform=gt_transform) - train_loader = DataLoader(train_set, 
batch_size=cfg_data.TRAIN_BATCH_SIZE, num_workers=8, shuffle=True, drop_last=True) + train_set = GCC( + cfg_data.DATA_PATH + "/txt_list/" + train_list, + "train", + main_transform=train_main_transform, + img_transform=img_transform, + gt_transform=gt_transform, + ) + train_loader = DataLoader( + train_set, + batch_size=cfg_data.TRAIN_BATCH_SIZE, + num_workers=8, + shuffle=True, + drop_last=True, + ) - val_set = GCC(cfg_data.DATA_PATH+'/txt_list/'+ test_list, 'test', main_transform=None, img_transform=img_transform, gt_transform=gt_transform) - val_loader = DataLoader(val_set, batch_size=cfg_data.VAL_BATCH_SIZE, num_workers=8, shuffle=True, drop_last=False) + val_set = GCC( + cfg_data.DATA_PATH + "/txt_list/" + test_list, + "test", + main_transform=None, + img_transform=img_transform, + gt_transform=gt_transform, + ) + val_loader = DataLoader( + val_set, + batch_size=cfg_data.VAL_BATCH_SIZE, + num_workers=8, + shuffle=True, + drop_last=False, + ) return train_loader, val_loader, restore_transform diff --git a/datasets/Mall/Mall.py b/datasets/Mall/Mall.py index d55a0e8..7334f42 100644 --- a/datasets/Mall/Mall.py +++ b/datasets/Mall/Mall.py @@ -10,53 +10,66 @@ from config import cfg + class Mall(data.Dataset): - def __init__(self, data_path, mode, main_transform=None, img_transform=None, gt_transform=None): - self.img_path = data_path + '/img' - self.gt_path = data_path + '/den' - self.data_files = [filename for filename in os.listdir(self.img_path) \ - if os.path.isfile(os.path.join(self.img_path,filename))] - self.num_samples = len(self.data_files) - self.main_transform=main_transform + def __init__( + self, + data_path, + mode, + main_transform=None, + img_transform=None, + gt_transform=None, + ): + self.img_path = data_path + "/img" + self.gt_path = data_path + "/den" + self.data_files = [ + filename + for filename in os.listdir(self.img_path) + if os.path.isfile(os.path.join(self.img_path, filename)) + ] + self.num_samples = len(self.data_files) + 
self.main_transform = main_transform self.img_transform = img_transform self.gt_transform = gt_transform self.mode = mode - if self.mode is 'train': - print('[Mall DATASET]: %d training images.' % (self.num_samples)) - if self.mode is 'test': - print('[Mall DATASET]: %d testing images.' % (self.num_samples)) - + if self.mode is "train": + print("[Mall DATASET]: %d training images." % (self.num_samples)) + if self.mode is "test": + print("[Mall DATASET]: %d testing images." % (self.num_samples)) + def __getitem__(self, index): # print self.data_files[index] fname = self.data_files[index] - img, den = self.read_image_and_gt(fname) + img, den = self.read_image_and_gt(fname) if self.main_transform is not None: - img, den = self.main_transform(img,den) + img, den = self.main_transform(img, den) if self.img_transform is not None: - img = self.img_transform(img) + img = self.img_transform(img) if self.gt_transform is not None: - den = self.gt_transform(den) + den = self.gt_transform(den) return img, den def __len__(self): return self.num_samples - def read_image_and_gt(self,fname): - img = Image.open(os.path.join(self.img_path,fname)) - if img.mode == 'L': - img = img.convert('RGB') + def read_image_and_gt(self, fname): + img = Image.open(os.path.join(self.img_path, fname)) + if img.mode == "L": + img = img.convert("RGB") # den = sio.loadmat(os.path.join(self.gt_path,os.path.splitext(fname)[0] + '.mat')) # den = den['map'] - den = pd.read_csv(os.path.join(self.gt_path,os.path.splitext(fname)[0] + '.csv'), sep=',',header=None).values - - den = den.astype(np.float32, copy=False) - den = Image.fromarray(den) - return img, den + den = pd.read_csv( + os.path.join(self.gt_path, os.path.splitext(fname)[0] + ".csv"), + sep=",", + header=None, + ).values + + den = den.astype(np.float32, copy=False) + den = Image.fromarray(den) + return img, den def get_num_samples(self): - return self.num_samples - - + return self.num_samples diff --git a/datasets/Mall/loading_data.py 
b/datasets/Mall/loading_data.py index 94a070d..16f4823 100644 --- a/datasets/Mall/loading_data.py +++ b/datasets/Mall/loading_data.py @@ -1,39 +1,63 @@ import torchvision.transforms as standard_transforms from torch.utils.data import DataLoader + # from misc.data import DataLoader import misc.transforms as own_transforms -from .Mall import Mall -from .setting import cfg_data +from datasets.Mall.Mall import Mall +from datasets.Mall.setting import cfg_data as default_cfg_data import torch -def loading_data(train_mode): +def loading_data(train_mode, cfg_data): mean_std = cfg_data.MEAN_STD log_para = cfg_data.LOG_PARA - train_main_transform = own_transforms.Compose([ - # own_transforms.RandomCrop(cfg_data.TRAIN_SIZE), - own_transforms.RandomHorizontallyFlip() - ]) - img_transform = standard_transforms.Compose([ - standard_transforms.ToTensor(), - standard_transforms.Normalize(*mean_std) - ]) - gt_transform = standard_transforms.Compose([ - own_transforms.LabelNormalize(log_para) - ]) - restore_transform = standard_transforms.Compose([ - own_transforms.DeNormalize(*mean_std), - standard_transforms.ToPILImage() - ]) + train_main_transform = own_transforms.Compose( + [ + # own_transforms.RandomCrop(cfg_data.TRAIN_SIZE), + own_transforms.RandomHorizontallyFlip() + ] + ) + img_transform = standard_transforms.Compose( + [standard_transforms.ToTensor(), standard_transforms.Normalize(*mean_std)] + ) + gt_transform = standard_transforms.Compose( + [own_transforms.LabelNormalize(log_para)] + ) + restore_transform = standard_transforms.Compose( + [own_transforms.DeNormalize(*mean_std), standard_transforms.ToPILImage()] + ) train_loader = None - if train_mode == 'DA': - train_set = Mall(cfg_data.DATA_PATH+'/train', 'train',main_transform=train_main_transform, img_transform=img_transform, gt_transform=gt_transform) - train_loader = DataLoader(train_set, batch_size=cfg_data.TRAIN_BATCH_SIZE, num_workers=0, shuffle=True, drop_last=True) - + if train_mode == "DA": + train_set = Mall( 
+ cfg_data.DATA_PATH + "/train", + "train", + main_transform=train_main_transform, + img_transform=img_transform, + gt_transform=gt_transform, + ) + train_loader = DataLoader( + train_set, + batch_size=cfg_data.TRAIN_BATCH_SIZE, + num_workers=0, + shuffle=True, + drop_last=True, + ) - val_set = Mall(cfg_data.DATA_PATH+'/test', 'test', main_transform=None, img_transform=img_transform, gt_transform=gt_transform) - val_loader = DataLoader(val_set, batch_size=cfg_data.VAL_BATCH_SIZE, num_workers=0, shuffle=False, drop_last=False) + val_set = Mall( + cfg_data.DATA_PATH + "/test", + "test", + main_transform=None, + img_transform=img_transform, + gt_transform=gt_transform, + ) + val_loader = DataLoader( + val_set, + batch_size=cfg_data.VAL_BATCH_SIZE, + num_workers=0, + shuffle=False, + drop_last=False, + ) return train_loader, val_loader, restore_transform diff --git a/datasets/QNRF/QNRF.py b/datasets/QNRF/QNRF.py index 2c3294e..bdcb786 100644 --- a/datasets/QNRF/QNRF.py +++ b/datasets/QNRF/QNRF.py @@ -9,45 +9,58 @@ import pandas as pd + class QNRF(data.Dataset): - def __init__(self, data_path, mode, main_transform=None, img_transform=None, gt_transform=None): - self.img_path = data_path + '/img' - self.gt_path = data_path + '/den' - self.data_files = [filename for filename in os.listdir(self.img_path) \ - if os.path.isfile(os.path.join(self.img_path,filename))] - self.num_samples = len(self.data_files) - self.main_transform=main_transform + def __init__( + self, + data_path, + mode, + main_transform=None, + img_transform=None, + gt_transform=None, + ): + self.img_path = data_path + "/img" + self.gt_path = data_path + "/den" + self.data_files = [ + filename + for filename in os.listdir(self.img_path) + if os.path.isfile(os.path.join(self.img_path, filename)) + ] + self.num_samples = len(self.data_files) + self.main_transform = main_transform self.img_transform = img_transform - self.gt_transform = gt_transform - + self.gt_transform = gt_transform + def __getitem__(self, 
index): fname = self.data_files[index] - img, den = self.read_image_and_gt(fname) + img, den = self.read_image_and_gt(fname) if self.main_transform is not None: - img, den = self.main_transform(img,den) + img, den = self.main_transform(img, den) if self.img_transform is not None: - img = self.img_transform(img) + img = self.img_transform(img) if self.gt_transform is not None: - den = self.gt_transform(den) + den = self.gt_transform(den) return img, den def __len__(self): return self.num_samples - def read_image_and_gt(self,fname): - img = Image.open(os.path.join(self.img_path,fname)) - if img.mode == 'L': - img = img.convert('RGB') + def read_image_and_gt(self, fname): + img = Image.open(os.path.join(self.img_path, fname)) + if img.mode == "L": + img = img.convert("RGB") # den = sio.loadmat(os.path.join(self.gt_path,os.path.splitext(fname)[0] + '.mat')) # den = den['map'] - den = pd.read_csv(os.path.join(self.gt_path,os.path.splitext(fname)[0] + '.csv'), sep=',',header=None).values - - den = den.astype(np.float32, copy=False) - den = Image.fromarray(den) - return img, den + den = pd.read_csv( + os.path.join(self.gt_path, os.path.splitext(fname)[0] + ".csv"), + sep=",", + header=None, + ).values + + den = den.astype(np.float32, copy=False) + den = Image.fromarray(den) + return img, den def get_num_samples(self): - return self.num_samples - - \ No newline at end of file + return self.num_samples diff --git a/datasets/QNRF/loading_data.py b/datasets/QNRF/loading_data.py index 8867800..70fbfe3 100644 --- a/datasets/QNRF/loading_data.py +++ b/datasets/QNRF/loading_data.py @@ -1,41 +1,52 @@ import torchvision.transforms as standard_transforms from torch.utils.data import DataLoader import misc.transforms as own_transforms -from .QNRF import QNRF -from .setting import cfg_data +from datasets.QNRF.QNRF import QNRF +from datasets.QNRF.setting import cfg_data as default_cfg_data import torch import random -def get_min_size(batch): + +def get_min_size(batch, cfg_data): min_ht 
= cfg_data.TRAIN_SIZE[0] min_wd = cfg_data.TRAIN_SIZE[1] for i_sample in batch: - - _,ht,wd = i_sample.shape - if ht1: - train_loader = DataLoader(train_set, batch_size=cfg_data.TRAIN_BATCH_SIZE, num_workers=8, collate_fn=SHHA_collate, shuffle=True, drop_last=True) - - - - val_set = SHHA(cfg_data.DATA_PATH+'/test', 'test', main_transform=None, img_transform=img_transform, gt_transform=gt_transform) - val_loader = DataLoader(val_set, batch_size=cfg_data.VAL_BATCH_SIZE, num_workers=8, shuffle=True, drop_last=False) + train_main_transform = own_transforms.Compose( + [own_transforms.RandomHorizontallyFlip()] + ) + img_transform = standard_transforms.Compose( + [standard_transforms.ToTensor(), standard_transforms.Normalize(*mean_std)] + ) + gt_transform = standard_transforms.Compose( + [own_transforms.GTScaleDown(factor), own_transforms.LabelNormalize(log_para)] + ) + restore_transform = standard_transforms.Compose( + [own_transforms.DeNormalize(*mean_std), standard_transforms.ToPILImage()] + ) + + train_set = SHHA( + cfg_data.DATA_PATH + "/train", + "train", + main_transform=train_main_transform, + img_transform=img_transform, + gt_transform=gt_transform, + ) + train_loader = None + if cfg_data.TRAIN_BATCH_SIZE == 1: + train_loader = DataLoader( + train_set, batch_size=1, num_workers=8, shuffle=True, drop_last=True + ) + elif cfg_data.TRAIN_BATCH_SIZE > 1: + train_loader = DataLoader( + train_set, + batch_size=cfg_data.TRAIN_BATCH_SIZE, + num_workers=8, + collate_fn=lambda x: SHHA_collate(x, cfg_data), + shuffle=True, + drop_last=True, + ) + + val_set = SHHA( + cfg_data.DATA_PATH + "/test", + "test", + main_transform=None, + img_transform=img_transform, + gt_transform=gt_transform, + ) + val_loader = DataLoader( + val_set, + batch_size=cfg_data.VAL_BATCH_SIZE, + num_workers=8, + shuffle=True, + drop_last=False, + ) return train_loader, val_loader, restore_transform diff --git a/datasets/SHHB/SHHB.py b/datasets/SHHB/SHHB.py index 97df132..7e15d97 100644 --- 
a/datasets/SHHB/SHHB.py +++ b/datasets/SHHB/SHHB.py @@ -11,45 +11,58 @@ from config import cfg + class SHHB(data.Dataset): - def __init__(self, data_path, mode, main_transform=None, img_transform=None, gt_transform=None): - self.img_path = data_path + '/img' - self.gt_path = data_path + '/den' - self.data_files = [filename for filename in os.listdir(self.img_path) \ - if os.path.isfile(os.path.join(self.img_path,filename))] - self.num_samples = len(self.data_files) - self.main_transform=main_transform + def __init__( + self, + data_path, + mode, + main_transform=None, + img_transform=None, + gt_transform=None, + ): + self.img_path = data_path + "/img" + self.gt_path = data_path + "/den" + self.data_files = [ + filename + for filename in os.listdir(self.img_path) + if os.path.isfile(os.path.join(self.img_path, filename)) + ] + self.num_samples = len(self.data_files) + self.main_transform = main_transform self.img_transform = img_transform - self.gt_transform = gt_transform - + self.gt_transform = gt_transform + def __getitem__(self, index): fname = self.data_files[index] - img, den = self.read_image_and_gt(fname) + img, den = self.read_image_and_gt(fname) if self.main_transform is not None: - img, den = self.main_transform(img,den) + img, den = self.main_transform(img, den) if self.img_transform is not None: - img = self.img_transform(img) + img = self.img_transform(img) if self.gt_transform is not None: - den = self.gt_transform(den) + den = self.gt_transform(den) return img, den def __len__(self): return self.num_samples - def read_image_and_gt(self,fname): - img = Image.open(os.path.join(self.img_path,fname)) - if img.mode == 'L': - img = img.convert('RGB') + def read_image_and_gt(self, fname): + img = Image.open(os.path.join(self.img_path, fname)) + if img.mode == "L": + img = img.convert("RGB") # den = sio.loadmat(os.path.join(self.gt_path,os.path.splitext(fname)[0] + '.mat')) # den = den['map'] - den = 
pd.read_csv(os.path.join(self.gt_path,os.path.splitext(fname)[0] + '.csv'), sep=',',header=None).values - - den = den.astype(np.float32, copy=False) - den = Image.fromarray(den) - return img, den + den = pd.read_csv( + os.path.join(self.gt_path, os.path.splitext(fname)[0] + ".csv"), + sep=",", + header=None, + ).values + + den = den.astype(np.float32, copy=False) + den = Image.fromarray(den) + return img, den def get_num_samples(self): - return self.num_samples - - \ No newline at end of file + return self.num_samples diff --git a/datasets/SHHB/loading_data.py b/datasets/SHHB/loading_data.py index 0aa446c..ee03069 100644 --- a/datasets/SHHB/loading_data.py +++ b/datasets/SHHB/loading_data.py @@ -1,39 +1,62 @@ import torchvision.transforms as standard_transforms from torch.utils.data import DataLoader import misc.transforms as own_transforms -from .SHHB import SHHB -from .setting import cfg_data +from datasets.SHHB.SHHB import SHHB +from datasets.SHHB.setting import cfg_data as default_cfg_data import torch -def loading_data(): +def loading_data(cfg_data): mean_std = cfg_data.MEAN_STD log_para = cfg_data.LOG_PARA - train_main_transform = own_transforms.Compose([ - #own_transforms.RandomCrop(cfg_data.TRAIN_SIZE), - own_transforms.RandomHorizontallyFlip() - ]) - val_main_transform = own_transforms.Compose([ - own_transforms.RandomCrop(cfg_data.TRAIN_SIZE) - ]) + train_main_transform = own_transforms.Compose( + [ + # own_transforms.RandomCrop(cfg_data.TRAIN_SIZE), + own_transforms.RandomHorizontallyFlip() + ] + ) + val_main_transform = own_transforms.Compose( + [own_transforms.RandomCrop(cfg_data.TRAIN_SIZE)] + ) val_main_transform = None - img_transform = standard_transforms.Compose([ - standard_transforms.ToTensor(), - standard_transforms.Normalize(*mean_std) - ]) - gt_transform = standard_transforms.Compose([ - own_transforms.LabelNormalize(log_para) - ]) - restore_transform = standard_transforms.Compose([ - own_transforms.DeNormalize(*mean_std), - 
standard_transforms.ToPILImage() - ]) + img_transform = standard_transforms.Compose( + [standard_transforms.ToTensor(), standard_transforms.Normalize(*mean_std)] + ) + gt_transform = standard_transforms.Compose( + [own_transforms.LabelNormalize(log_para)] + ) + restore_transform = standard_transforms.Compose( + [own_transforms.DeNormalize(*mean_std), standard_transforms.ToPILImage()] + ) - train_set = SHHB(cfg_data.DATA_PATH+'/train', 'train',main_transform=train_main_transform, img_transform=img_transform, gt_transform=gt_transform) - train_loader = DataLoader(train_set, batch_size=cfg_data.TRAIN_BATCH_SIZE, num_workers=8, shuffle=True, drop_last=True) - + train_set = SHHB( + cfg_data.DATA_PATH + "/train", + "train", + main_transform=train_main_transform, + img_transform=img_transform, + gt_transform=gt_transform, + ) + train_loader = DataLoader( + train_set, + batch_size=cfg_data.TRAIN_BATCH_SIZE, + num_workers=8, + shuffle=True, + drop_last=True, + ) - val_set = SHHB(cfg_data.DATA_PATH+'/test', 'test', main_transform=val_main_transform, img_transform=img_transform, gt_transform=gt_transform) - val_loader = DataLoader(val_set, batch_size=cfg_data.VAL_BATCH_SIZE, num_workers=8, shuffle=True, drop_last=False) + val_set = SHHB( + cfg_data.DATA_PATH + "/test", + "test", + main_transform=val_main_transform, + img_transform=img_transform, + gt_transform=gt_transform, + ) + val_loader = DataLoader( + val_set, + batch_size=cfg_data.VAL_BATCH_SIZE, + num_workers=8, + shuffle=True, + drop_last=False, + ) return train_loader, val_loader, restore_transform diff --git a/datasets/UCF50/UCF50.py b/datasets/UCF50/UCF50.py index 16ba61c..1ba3381 100644 --- a/datasets/UCF50/UCF50.py +++ b/datasets/UCF50/UCF50.py @@ -9,61 +9,68 @@ class UCF50(data.Dataset): - def __init__(self, data_path, folder, mode, main_transform=None, img_transform=None, gt_transform=None): - self.img_path = data_path + '/img' - self.gt_path = data_path + '/den' + def __init__( + self, + data_path, + folder, + 
mode, + main_transform=None, + img_transform=None, + gt_transform=None, + ): + self.img_path = data_path + "/img" + self.gt_path = data_path + "/den" self.mode = mode self.img_files = [] self.gt_files = [] for i_folder in folder: - folder_img = self.img_path + '/' + str(i_folder) - folder_gt = self.gt_path + '/' + str(i_folder) + folder_img = self.img_path + "/" + str(i_folder) + folder_gt = self.gt_path + "/" + str(i_folder) for filename in os.listdir(folder_img): - if os.path.isfile(os.path.join(folder_img,filename)): - self.img_files.append(folder_img + '/' + filename) - self.gt_files.append(folder_gt + '/' + filename.split('.')[0] + '.csv') + if os.path.isfile(os.path.join(folder_img, filename)): + self.img_files.append(folder_img + "/" + filename) + self.gt_files.append( + folder_gt + "/" + filename.split(".")[0] + ".csv" + ) - self.num_samples = len(self.img_files) + self.num_samples = len(self.img_files) self.mode = mode - self.main_transform=main_transform + self.main_transform = main_transform self.img_transform = img_transform self.gt_transform = gt_transform - - - + def __getitem__(self, index): img, den = self.read_image_and_gt(index) - + if self.main_transform is not None: - img, den = self.main_transform(img,den) + img, den = self.main_transform(img, den) if self.img_transform is not None: img = self.img_transform(img) if self.gt_transform is not None: - den = self.gt_transform(den) - + den = self.gt_transform(den) + return img, den def __len__(self): return self.num_samples - def read_image_and_gt(self,index): - img = Image.open(os.path.join(self.img_path,self.img_files[index])) - if img.mode == 'L': - img = img.convert('RGB') + def read_image_and_gt(self, index): + img = Image.open(os.path.join(self.img_path, self.img_files[index])) + if img.mode == "L": + img = img.convert("RGB") - den = pd.read_csv(os.path.join(self.gt_path,self.gt_files[index]), sep=',',header=None).values + den = pd.read_csv( + os.path.join(self.gt_path, self.gt_files[index]), 
sep=",", header=None + ).values den = den.astype(np.float32, copy=False) den = Image.fromarray(den) - - return img, den + return img, den def get_num_samples(self): - return self.num_samples - - + return self.num_samples diff --git a/datasets/UCF50/loading_data.py b/datasets/UCF50/loading_data.py index f68c99f..d3f8c3f 100644 --- a/datasets/UCF50/loading_data.py +++ b/datasets/UCF50/loading_data.py @@ -1,41 +1,52 @@ import torchvision.transforms as standard_transforms from torch.utils.data import DataLoader import misc.transforms as own_transforms -from .UCF50 import UCF50 -from .setting import cfg_data +from datasets.UCF50.UCF50 import UCF50 +from datasets.UCF50.setting import cfg_data as default_cfg_data import torch import random -def get_min_size(batch): + +def get_min_size(batch, cfg_data): min_ht = cfg_data.TRAIN_SIZE[0] min_wd = cfg_data.TRAIN_SIZE[1] for i_sample in batch: - - _,ht,wd = i_sample.shape - if ht= self.c_size + assert len(keypoints) > 0 + i, j, h, w = random_crop(ht, wd, self.c_size, self.c_size) + img = F.crop(img, i, j, h, w) + + nearest_dis = np.clip(0.8*keypoints[:, 2], 4.0, 40.0) + points_left_up = keypoints[:, :2] - nearest_dis[:, None] / 2.0 + points_right_down = keypoints[:, :2] + nearest_dis[:, None] / 2.0 + bbox = np.concatenate((points_left_up, points_right_down), axis=1) + inner_area = cal_innner_area(j, i, j+w, i+h, bbox) + origin_area = nearest_dis * nearest_dis + ratio = np.clip(1.0 * inner_area / origin_area, 0.0, 1.0) + mask = (ratio >= 0.5) + keypoints = keypoints[mask] + keypoints = keypoints[:, :2] - [j, i] # change coodinate + target = np.ones(len(keypoints)) + + if len(keypoints) > 0: + if random.random() > 0.5: + img = F.hflip(img) + keypoints[:, 0] = w - keypoints[:, 0] + else: + if random.random() > 0.5: + img = F.hflip(img) + return self.trans(img), torch.from_numpy(keypoints.copy()).float(), \ + torch.from_numpy(target.copy()).float(), st_size diff --git a/dropin_config.py b/dropin_config.py new file mode 100644 index 
0000000..7b38f8c --- /dev/null +++ b/dropin_config.py @@ -0,0 +1,42 @@ +import os +from easydict import EasyDict as edict +import time +import torch + +# For easy copy-paste in a colab environment +__C = edict() +cfg = __C +__C.SEED = 3035 +__C.DATASET = 'SHHB' +if __C.DATASET == 'UCF50': + from datasets.UCF50.setting import cfg_data + __C.VAL_INDEX = cfg_data.VAL_INDEX +if __C.DATASET == 'GCC': + from datasets.GCC.setting import cfg_data + __C.VAL_MODE = cfg_data.VAL_MODE +__C.NET = 'Res101_SFCN' +__C.PRE_GCC = False +__C.PRE_GCC_MODEL = 'path to model' +__C.RESUME = False +__C.RESUME_PATH = './exp/04-25_09-19_SHHB_VGG_1e-05/latest_state.pth' +__C.GPU_ID = [0,1] +__C.LR = 1e-5 +__C.LR_DECAY = 0.995 +__C.LR_DECAY_START = -1 +__C.NUM_EPOCH_LR_DECAY = 1 +__C.MAX_EPOCH = 200 +__C.LAMBDA_1 = 1e-4 +__C.PRINT_FREQ = 10 +now = time.strftime("%m-%d_%H-%M", time.localtime()) +__C.EXP_NAME = now \ + + '_' + __C.DATASET \ + + '_' + __C.NET \ + + '_' + str(__C.LR) +if __C.DATASET == 'UCF50': + __C.EXP_NAME += '_' + str(__C.VAL_INDEX) +if __C.DATASET == 'GCC': + __C.EXP_NAME += '_' + __C.VAL_MODE +__C.EXP_PATH = './exp' +__C.VAL_DENSE_START = 50 +__C.VAL_FREQ = 10 +__C.VISIBLE_NUM_IMGS = 1 \ No newline at end of file diff --git a/losses/__init__.py b/losses/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/losses/__init__.py @@ -0,0 +1 @@ + diff --git a/losses/bay_loss.py b/losses/bay_loss.py new file mode 100644 index 0000000..a656592 --- /dev/null +++ b/losses/bay_loss.py @@ -0,0 +1,30 @@ +from torch.nn.modules import Module +import torch + +class Bay_Loss(Module): + def __init__(self, use_background, device): + super(Bay_Loss, self).__init__() + self.device = device + self.use_bg = use_background + + def forward(self, prob_list, target_list, pre_density): + loss = 0 + for idx, prob in enumerate(prob_list): # iterative through each sample + if prob is None: # image contains no annotation points + pre_count = torch.sum(pre_density[idx]) + target = 
torch.zeros((1,), dtype=torch.float32, device=self.device) + else: + N = len(prob) + if self.use_bg: + target = torch.zeros((N,), dtype=torch.float32, device=self.device) + target[:-1] = target_list[idx] + else: + target = target_list[idx] + pre_count = torch.sum(pre_density[idx].view((1, -1)) * prob, dim=1) # flatten into vector + + loss += torch.sum(torch.abs(target - pre_count)) + loss = loss / len(prob_list) + return loss + + + diff --git a/losses/post_prob.py b/losses/post_prob.py new file mode 100644 index 0000000..c634fc7 --- /dev/null +++ b/losses/post_prob.py @@ -0,0 +1,52 @@ +import torch +from torch.nn import Module + +class Post_Prob(Module): + def __init__(self, sigma, c_size, stride, background_ratio, use_background, device): + super(Post_Prob, self).__init__() + assert c_size % stride == 0 + + self.sigma = sigma + self.bg_ratio = background_ratio + self.device = device + # coordinate is same to image space, set to constant since crop size is same + self.cood = torch.arange(0, c_size, step=stride, + dtype=torch.float32, device=device) + stride / 2 + self.cood.unsqueeze_(0) + self.softmax = torch.nn.Softmax(dim=0) + self.use_bg = use_background + + def forward(self, points, st_sizes): + num_points_per_image = [len(points_per_image) for points_per_image in points] + all_points = torch.cat(points, dim=0) + + if len(all_points) > 0: + x = all_points[:, 0].unsqueeze_(1) + y = all_points[:, 1].unsqueeze_(1) + x_dis = -2 * torch.matmul(x, self.cood) + x * x + self.cood * self.cood + y_dis = -2 * torch.matmul(y, self.cood) + y * y + self.cood * self.cood + y_dis.unsqueeze_(2) + x_dis.unsqueeze_(1) + dis = y_dis + x_dis + dis = dis.view((dis.size(0), -1)) + + dis_list = torch.split(dis, num_points_per_image) + prob_list = [] + for dis, st_size in zip(dis_list, st_sizes): + if len(dis) > 0: + if self.use_bg: + min_dis = torch.clamp(torch.min(dis, dim=0, keepdim=True)[0], min=0.0) + bg_dis = (st_size * self.bg_ratio) ** 2 / (min_dis + 1e-5) + dis = 
torch.cat([dis, bg_dis], 0) # concatenate background distance to the last + dis = -dis / (2.0 * self.sigma ** 2) + prob = self.softmax(dis) + else: + prob = None + prob_list.append(prob) + else: + prob_list = [] + for _ in range(len(points)): + prob_list.append(None) + return prob_list + + diff --git a/misc/bay_loss.py b/misc/bay_loss.py new file mode 100644 index 0000000..e2c48c9 --- /dev/null +++ b/misc/bay_loss.py @@ -0,0 +1,58 @@ +import torch +import torch.nn as nn +from torch.nn.parameter import Parameter +from torch.nn import functional as F +from torch.autograd import Variable +from torch.nn.modules.loss import _Loss +from torch.nn.modules import Module +from post_prob import Post_Prob +from bay_loss_trainer import parse_args + +import numpy as np + +class Bay_Loss(_Loss): + def __init__(self, use_background, device): + super(Bay_Loss, self).__init__() + self.device = device + self.use_bg = use_background + + def forward(self, prob_list, target_list, pre_density): + loss = 0 + for idx, prob in enumerate(prob_list): # iterative through each sample + if prob is None: # image contains no annotation points + pre_count = torch.sum(pre_density[idx]) + target = torch.zeros((1,), dtype=torch.float32, device=self.device) + else: + N = len(prob) + if self.use_bg: + target = torch.zeros((N,), dtype=torch.float32, device=self.device) + target[:-1] = target_list[idx] + else: + target = target_list[idx] + pre_count = torch.sum(pre_density[idx].view((1, -1)) * prob, dim=1) # flatten into vector + + loss += torch.sum(torch.abs(target - pre_count)) + loss = loss / len(prob_list) + + return loss + +if __name__ == "__main__": + args=parse_args() + data = torch.zeros(1, 1, 1, 1) + data += 0.001 + target = torch.zeros(1, 1, 1, 1) + data = Variable(data, requires_grad=True) + target = Variable(target) + device=torch.device("cpu") + post_prob=Post_Prob(args.sigma, + args.crop_size, + args.downsample_ratio, + args.background_ratio, + args.use_background, + device) + prob_list = 
post_prob(points, st_sizes) + model = Bay_Loss(True,device) + loss = model(post_prob,data, target) + loss.backward() + print(loss) + print(data.grad) \ No newline at end of file diff --git a/misc/bay_loss_trainer.py b/misc/bay_loss_trainer.py new file mode 100644 index 0000000..a5e77e9 --- /dev/null +++ b/misc/bay_loss_trainer.py @@ -0,0 +1,59 @@ +#from misc.bay_loss import RegTrainer +import argparse +import os +import torch +args = None + +def parse_args(): + parser = argparse.ArgumentParser(description='Train ') + parser.add_argument('--data-dir', default='/home/teddy/UCF-Train-Val-Test', + help='training data directory') + parser.add_argument('--save-dir', default='/home/teddy/vgg', + help='directory to save models.') + + parser.add_argument('--lr', type=float, default=1e-5, + help='the initial learning rate') + parser.add_argument('--weight-decay', type=float, default=1e-4, + help='the weight decay') + parser.add_argument('--resume', default='', + help='the path of resume training model') + parser.add_argument('--max-model-num', type=int, default=1, + help='max models num to save ') + parser.add_argument('--max-epoch', type=int, default=1000, + help='max training epoch') + parser.add_argument('--val-epoch', type=int, default=5, + help='the num of steps to log training information') + parser.add_argument('--val-start', type=int, default=600, + help='the epoch start to val') + + parser.add_argument('--batch-size', type=int, default=1, + help='train batch size') + parser.add_argument('--device', default='0', help='assign device') + parser.add_argument('--num-workers', type=int, default=8, + help='the num of training process') + + parser.add_argument('--is-gray', type=bool, default=False, + help='whether the input image is gray') + parser.add_argument('--crop-size', type=int, default=512, + help='the crop size of the train image') + parser.add_argument('--downsample-ratio', type=int, default=8, + help='downsample ratio') + + parser.add_argument('--use-background', 
type=bool, default=True, + help='whether to use background modelling') + parser.add_argument('--sigma', type=float, default=8.0, + help='sigma for likelihood') + parser.add_argument('--background-ratio', type=float, default=1.0, + help='background ratio') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + print("args",args) + torch.backends.cudnn.benchmark = True + os.environ['CUDA_VISIBLE_DEVICES'] = args.device.strip() # set vis gpu + #trainer = RegTrainer(args) + #trainer.setup() + #trainer.train() \ No newline at end of file diff --git a/misc/cal_mean.py b/misc/cal_mean.py index 48313b3..bd3304c 100644 --- a/misc/cal_mean.py +++ b/misc/cal_mean.py @@ -2,42 +2,46 @@ import torchvision.datasets as dset - import pdb from PIL import Image import numpy as np import os -# TODO +# TODO + def make_parser(): parser = argparse.ArgumentParser() - parser.add_argument('--trainDataPath', type=str, default='/media/D/DataSet/UCF-QNRF_ECCV18/train_img', - help='absolute path to your data path') + parser.add_argument( + "--trainDataPath", + type=str, + default="/media/D/DataSet/UCF-QNRF_ECCV18/train_img", + help="absolute path to your data path", + ) return parser -if __name__ == '__main__': + +if __name__ == "__main__": args = make_parser().parse_args() imgs_list = [] for i_img, img_name in enumerate(os.listdir(args.trainDataPath)): if i_img % 100 == 0: - print( i_img ) + print(i_img) img = Image.open(os.path.join(args.trainDataPath, img_name)) - if img.mode == 'L': - img = img.convert('RGB') + if img.mode == "L": + img = img.convert("RGB") - img = np.array(img.resize((1024,768),Image.BILINEAR)) + img = np.array(img.resize((1024, 768), Image.BILINEAR)) imgs_list.append(img) - imgs = np.array(imgs_list).astype(np.float32)/255. 
- red = imgs[:,:,:,0] - green = imgs[:,:,:,1] - blue = imgs[:,:,:,2] - + imgs = np.array(imgs_list).astype(np.float32) / 255.0 + red = imgs[:, :, :, 0] + green = imgs[:, :, :, 1] + blue = imgs[:, :, :, 2] - print("means: [{}, {}, {}]".format(np.mean(red),np.mean(green),np.mean(blue))) - print("stdevs: [{}, {}, {}]".format(np.std(red),np.std(green),np.std(blue))) + print("means: [{}, {}, {}]".format(np.mean(red), np.mean(green), np.mean(blue))) + print("stdevs: [{}, {}, {}]".format(np.std(red), np.std(green), np.std(blue))) diff --git a/misc/layer.py b/misc/layer.py index 1ac723a..22a4696 100644 --- a/misc/layer.py +++ b/misc/layer.py @@ -3,19 +3,47 @@ class Conv2d(nn.Module): - def __init__(self, in_channels, out_channels, kernel_size, stride=1, NL='relu', same_padding=False, bn=False, dilation=1): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + NL="relu", + same_padding=False, + bn=False, + dilation=1, + ): super(Conv2d, self).__init__() padding = int((kernel_size - 1) // 2) if same_padding else 0 self.conv = [] - if dilation==1: - self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=padding, dilation=dilation) + if dilation == 1: + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride, + padding=padding, + dilation=dilation, + ) else: - self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding=dilation, dilation=dilation) - self.bn = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0, affine=True) if bn else None - if NL == 'relu' : - self.relu = nn.ReLU(inplace=True) - elif NL == 'prelu': - self.relu = nn.PReLU() + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size, + stride, + padding=dilation, + dilation=dilation, + ) + self.bn = ( + nn.BatchNorm2d(out_channels, eps=0.001, momentum=0, affine=True) + if bn + else None + ) + if NL == "relu": + self.relu = nn.ReLU(inplace=True) + elif NL == "prelu": + self.relu = nn.PReLU() else: self.relu = 
None @@ -29,13 +57,13 @@ def forward(self, x): class FC(nn.Module): - def __init__(self, in_features, out_features, NL='relu'): + def __init__(self, in_features, out_features, NL="relu"): super(FC, self).__init__() self.fc = nn.Linear(in_features, out_features) - if NL == 'relu' : - self.relu = nn.ReLU(inplace=True) - elif NL == 'prelu': - self.relu = nn.PReLU() + if NL == "relu": + self.relu = nn.ReLU(inplace=True) + elif NL == "prelu": + self.relu = nn.PReLU() else: self.relu = None @@ -47,69 +75,72 @@ def forward(self, x): class convDU(nn.Module): - - def __init__(self, - in_out_channels=2048, - kernel_size=(9,1) - ): + def __init__(self, in_out_channels=2048, kernel_size=(9, 1)): super(convDU, self).__init__() self.conv = nn.Sequential( - nn.Conv2d(in_out_channels, in_out_channels, kernel_size, stride=1, padding=((kernel_size[0]-1)//2,(kernel_size[1]-1)//2)), - nn.ReLU(inplace=True) - ) + nn.Conv2d( + in_out_channels, + in_out_channels, + kernel_size, + stride=1, + padding=((kernel_size[0] - 1) // 2, (kernel_size[1] - 1) // 2), + ), + nn.ReLU(inplace=True), + ) def forward(self, fea): n, c, h, w = fea.size() fea_stack = [] for i in range(h): - i_fea = fea.select(2, i).resize(n,c,1,w) + i_fea = fea.select(2, i).resize(n, c, 1, w) if i == 0: fea_stack.append(i_fea) continue - fea_stack.append(self.conv(fea_stack[i-1])+i_fea) + fea_stack.append(self.conv(fea_stack[i - 1]) + i_fea) # pdb.set_trace() # fea[:,i,:,:] = self.conv(fea[:,i-1,:,:].expand(n,1,h,w))+fea[:,i,:,:].expand(n,1,h,w) - for i in range(h): - pos = h-i-1 - if pos == h-1: + pos = h - i - 1 + if pos == h - 1: continue - fea_stack[pos] = self.conv(fea_stack[pos+1])+fea_stack[pos] + fea_stack[pos] = self.conv(fea_stack[pos + 1]) + fea_stack[pos] # pdb.set_trace() fea = torch.cat(fea_stack, 2) return fea -class convLR(nn.Module): - def __init__(self, - in_out_channels=2048, - kernel_size=(1,9) - ): +class convLR(nn.Module): + def __init__(self, in_out_channels=2048, kernel_size=(1, 9)): super(convLR, 
self).__init__() self.conv = nn.Sequential( - nn.Conv2d(in_out_channels, in_out_channels, kernel_size, stride=1, padding=((kernel_size[0]-1)//2,(kernel_size[1]-1)//2)), - nn.ReLU(inplace=True) - ) + nn.Conv2d( + in_out_channels, + in_out_channels, + kernel_size, + stride=1, + padding=((kernel_size[0] - 1) // 2, (kernel_size[1] - 1) // 2), + ), + nn.ReLU(inplace=True), + ) def forward(self, fea): n, c, h, w = fea.size() fea_stack = [] for i in range(w): - i_fea = fea.select(3, i).resize(n,c,h,1) + i_fea = fea.select(3, i).resize(n, c, h, 1) if i == 0: fea_stack.append(i_fea) continue - fea_stack.append(self.conv(fea_stack[i-1])+i_fea) + fea_stack.append(self.conv(fea_stack[i - 1]) + i_fea) for i in range(w): - pos = w-i-1 - if pos == w-1: + pos = w - i - 1 + if pos == w - 1: continue - fea_stack[pos] = self.conv(fea_stack[pos+1])+fea_stack[pos] - + fea_stack[pos] = self.conv(fea_stack[pos + 1]) + fea_stack[pos] fea = torch.cat(fea_stack, 3) - return fea \ No newline at end of file + return fea diff --git a/misc/post_prob.py b/misc/post_prob.py new file mode 100644 index 0000000..dcfef3d --- /dev/null +++ b/misc/post_prob.py @@ -0,0 +1,50 @@ +import torch +from torch.nn import Module + +class Post_Prob(Module): + def __init__(self, sigma, c_size, stride, background_ratio, use_background, device): + super(Post_Prob, self).__init__() + assert c_size % stride == 0 + + self.sigma = sigma + self.bg_ratio = background_ratio + self.device = device + # coordinate is same to image space, set to constant since crop size is same + self.cood = torch.arange(0, c_size, step=stride, + dtype=torch.float32, device=device) + stride / 2 + self.cood.unsqueeze_(0) + self.softmax = torch.nn.Softmax(dim=0) + self.use_bg = use_background + + def forward(self, points, st_sizes): + num_points_per_image = [len(points_per_image) for points_per_image in points] + all_points = torch.cat(points, dim=0) + + if len(all_points) > 0: + x = all_points[:, 0].unsqueeze_(1) + y = all_points[:, 
1].unsqueeze_(1) + x_dis = -2 * torch.matmul(x, self.cood) + x * x + self.cood * self.cood + y_dis = -2 * torch.matmul(y, self.cood) + y * y + self.cood * self.cood + y_dis.unsqueeze_(2) + x_dis.unsqueeze_(1) + dis = y_dis + x_dis + dis = dis.view((dis.size(0), -1)) + + dis_list = torch.split(dis, num_points_per_image) + prob_list = [] + for dis, st_size in zip(dis_list, st_sizes): + if len(dis) > 0: + if self.use_bg: + min_dis = torch.clamp(torch.min(dis, dim=0, keepdim=True)[0], min=0.0) + bg_dis = (st_size * self.bg_ratio) ** 2 / (min_dis + 1e-5) + dis = torch.cat([dis, bg_dis], 0) # concatenate background distance to the last + dis = -dis / (2.0 * self.sigma ** 2) + prob = self.softmax(dis) + else: + prob = None + prob_list.append(prob) + else: + prob_list = [] + for _ in range(len(points)): + prob_list.append(None) + return prob_list diff --git a/misc/ssim_loss.py b/misc/ssim_loss.py index 0d7bcef..50ae31c 100644 --- a/misc/ssim_loss.py +++ b/misc/ssim_loss.py @@ -8,16 +8,18 @@ from torch.nn.modules.loss import _assert_no_grad, _Loss import numpy as np + def gaussian_kernel(size, sigma): - x, y = np.mgrid[-size:size+1, -size:size+1] - kernel = np.exp(-0.5*(x*x+y*y)/(sigma*sigma)) + x, y = np.mgrid[-size : size + 1, -size : size + 1] + kernel = np.exp(-0.5 * (x * x + y * y) / (sigma * sigma)) kernel /= kernel.sum() return kernel + class SSIM_Loss(_Loss): def __init__(self, in_channels, size=11, sigma=1.5, size_average=True): super(SSIM_Loss, self).__init__(size_average) - #assert in_channels == 1, 'Only support single-channel input' + # assert in_channels == 1, 'Only support single-channel input' self.in_channels = in_channels self.size = int(size) self.sigma = sigma @@ -31,19 +33,38 @@ def __init__(self, in_channels, size=11, sigma=1.5, size_average=True): def forward(self, input, target, mask=None): _assert_no_grad(target) mean1 = F.conv2d(input, self.weight, padding=self.size, groups=self.in_channels) - mean2 = F.conv2d(target, self.weight, 
padding=self.size, groups=self.in_channels) - mean1_sq = mean1*mean1 - mean2_sq = mean2*mean2 - mean_12 = mean1*mean2 + mean2 = F.conv2d( + target, self.weight, padding=self.size, groups=self.in_channels + ) + mean1_sq = mean1 * mean1 + mean2_sq = mean2 * mean2 + mean_12 = mean1 * mean2 + + sigma1_sq = ( + F.conv2d( + input * input, self.weight, padding=self.size, groups=self.in_channels + ) + - mean1_sq + ) + sigma2_sq = ( + F.conv2d( + target * target, self.weight, padding=self.size, groups=self.in_channels + ) + - mean2_sq + ) + sigma_12 = ( + F.conv2d( + input * target, self.weight, padding=self.size, groups=self.in_channels + ) + - mean_12 + ) - sigma1_sq = F.conv2d(input*input, self.weight, padding=self.size, groups=self.in_channels) - mean1_sq - sigma2_sq = F.conv2d(target*target, self.weight, padding=self.size, groups=self.in_channels) - mean2_sq - sigma_12 = F.conv2d(input*target, self.weight, padding=self.size, groups=self.in_channels) - mean_12 - C1 = 0.01**2 C2 = 0.03**2 - ssim = ((2*mean_12+C1)*(2*sigma_12+C2)) / ((mean1_sq+mean2_sq+C1)*(sigma1_sq+sigma2_sq+C2)) + ssim = ((2 * mean_12 + C1) * (2 * sigma_12 + C2)) / ( + (mean1_sq + mean2_sq + C1) * (sigma1_sq + sigma2_sq + C2) + ) if self.size_average: out = 1 - ssim.mean() else: @@ -51,7 +72,7 @@ def forward(self, input, target, mask=None): return out -if __name__ == '__main__': +if __name__ == "__main__": data = torch.zeros(1, 1, 1, 1) data += 0.001 target = torch.zeros(1, 1, 1, 1) @@ -59,7 +80,7 @@ def forward(self, input, target, mask=None): target = Variable(target) model = SSIM_Loss(1) - loss = model(data, target) + loss = model(data, target) loss.backward() print(loss) - print(data.grad) \ No newline at end of file + print(data.grad) diff --git a/misc/transforms.py b/misc/transforms.py index b4db486..9df9f6c 100644 --- a/misc/transforms.py +++ b/misc/transforms.py @@ -4,8 +4,10 @@ from PIL import Image, ImageOps, ImageFilter from config import cfg import torch + # 
===============================img tranforms============================ + class Compose(object): def __init__(self, transforms): self.transforms = transforms @@ -19,21 +21,29 @@ def __call__(self, img, mask, bbx=None): img, mask, bbx = t(img, mask, bbx) return img, mask, bbx + class RandomHorizontallyFlip(object): def __call__(self, img, mask, bbx=None): if random.random() < 0.5: if bbx is None: - return img.transpose(Image.FLIP_LEFT_RIGHT), mask.transpose(Image.FLIP_LEFT_RIGHT) + return img.transpose(Image.FLIP_LEFT_RIGHT), mask.transpose( + Image.FLIP_LEFT_RIGHT + ) w, h = img.size - xmin = w - bbx[:,3] - xmax = w - bbx[:,1] - bbx[:,1] = xmin - bbx[:,3] = xmax - return img.transpose(Image.FLIP_LEFT_RIGHT), mask.transpose(Image.FLIP_LEFT_RIGHT), bbx + xmin = w - bbx[:, 3] + xmax = w - bbx[:, 1] + bbx[:, 1] = xmin + bbx[:, 3] = xmax + return ( + img.transpose(Image.FLIP_LEFT_RIGHT), + mask.transpose(Image.FLIP_LEFT_RIGHT), + bbx, + ) if bbx is None: return img, mask return img, mask, bbx + class RandomCrop(object): def __init__(self, size, padding=0): if isinstance(size, numbers.Number): @@ -56,11 +66,15 @@ def __call__(self, img, mask, dst_size=None): if w == tw and h == th: return img, mask if w < tw or h < th: - return img.resize((tw, th), Image.BILINEAR), mask.resize((tw, th), Image.NEAREST) + return img.resize((tw, th), Image.BILINEAR), mask.resize( + (tw, th), Image.NEAREST + ) x1 = random.randint(0, w - tw) y1 = random.randint(0, h - th) - return img.crop((x1, y1, x1 + tw, y1 + th)), mask.crop((x1, y1, x1 + tw, y1 + th)) + return img.crop((x1, y1, x1 + tw, y1 + th)), mask.crop( + (x1, y1, x1 + tw, y1 + th) + ) class CenterCrop(object): @@ -73,10 +87,11 @@ def __init__(self, size): def __call__(self, img, mask): w, h = img.size th, tw = self.size - x1 = int(round((w - tw) / 2.)) - y1 = int(round((h - th) / 2.)) - return img.crop((x1, y1, x1 + tw, y1 + th)), mask.crop((x1, y1, x1 + tw, y1 + th)) - + x1 = int(round((w - tw) / 2.0)) + y1 = int(round((h - th) / 
2.0)) + return img.crop((x1, y1, x1 + tw, y1 + th)), mask.crop( + (x1, y1, x1 + tw, y1 + th) + ) class FreeScale(object): @@ -84,7 +99,9 @@ def __init__(self, size): self.size = size # (h, w) def __call__(self, img, mask): - return img.resize((self.size[1], self.size[0]), Image.BILINEAR), mask.resize((self.size[1], self.size[0]), Image.NEAREST) + return img.resize((self.size[1], self.size[0]), Image.BILINEAR), mask.resize( + (self.size[1], self.size[0]), Image.NEAREST + ) class ScaleDown(object): @@ -92,7 +109,10 @@ def __init__(self, size): self.size = size # (h, w) def __call__(self, mask): - return mask.resize((self.size[1]/cfg.TRAIN.DOWNRATE, self.size[0]/cfg.TRAIN.DOWNRATE), Image.NEAREST) + return mask.resize( + (self.size[1] / cfg.TRAIN.DOWNRATE, self.size[0] / cfg.TRAIN.DOWNRATE), + Image.NEAREST, + ) class Scale(object): @@ -101,8 +121,8 @@ def __init__(self, size): def __call__(self, img, mask): if img.size != mask.size: - print( img.size ) - print( mask.size ) + print(img.size) + print(mask.size) assert img.size == mask.size w, h = img.size if (w <= h and w == self.size) or (h <= w and h == self.size): @@ -110,15 +130,20 @@ def __call__(self, img, mask): if w < h: ow = self.size oh = int(self.size * h / w) - return img.resize((ow, oh), Image.BILINEAR), mask.resize((ow, oh), Image.NEAREST) + return img.resize((ow, oh), Image.BILINEAR), mask.resize( + (ow, oh), Image.NEAREST + ) else: oh = self.size ow = int(self.size * w / h) - return img.resize((ow, oh), Image.BILINEAR), mask.resize((ow, oh), Image.NEAREST) + return img.resize((ow, oh), Image.BILINEAR), mask.resize( + (ow, oh), Image.NEAREST + ) # ===============================label tranforms============================ + class DeNormalize(object): def __init__(self, mean, std): self.mean = mean @@ -142,17 +167,22 @@ def __init__(self, para): def __call__(self, tensor): # tensor = 1./(tensor+self.para).log() tensor = torch.from_numpy(np.array(tensor)) - tensor = tensor*self.para + tensor = tensor * 
self.para return tensor + class GTScaleDown(object): def __init__(self, factor=8): self.factor = factor def __call__(self, img): w, h = img.size - if self.factor==1: + if self.factor == 1: return img - tmp = np.array(img.resize((w//self.factor, h//self.factor), Image.BICUBIC))*self.factor*self.factor + tmp = ( + np.array(img.resize((w // self.factor, h // self.factor), Image.BICUBIC)) + * self.factor + * self.factor + ) img = Image.fromarray(tmp) return img diff --git a/misc/utils.py b/misc/utils.py index 85c9464..a16d2bb 100644 --- a/misc/utils.py +++ b/misc/utils.py @@ -26,7 +26,7 @@ def real_init_weights(m): for mini_m in m: real_init_weights(mini_m) else: - if isinstance(m, nn.Conv2d): + if isinstance(m, nn.Conv2d): nn.init.normal_(m.weight, std=0.01) if m.bias is not None: nn.init.constant_(m.bias, 0) @@ -35,21 +35,22 @@ def real_init_weights(m): elif isinstance(m, nn.BatchNorm2d): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) - elif isinstance(m,nn.Module): + elif isinstance(m, nn.Module): for mini_m in m.children(): real_init_weights(mini_m) else: - print( m ) + print(m) + def weights_normal_init(*models): for model in models: - dev=0.01 + dev = 0.01 if isinstance(model, list): for m in model: weights_normal_init(m, dev) else: - for m in model.modules(): - if isinstance(m, nn.Conv2d): + for m in model.modules(): + if isinstance(m, nn.Conv2d): m.weight.data.normal_(0.0, dev) if m.bias is not None: m.bias.data.fill_(0.0) @@ -60,63 +61,58 @@ def weights_normal_init(*models): def logger(exp_path, exp_name, work_dir, exception, resume=False): from tensorboardX import SummaryWriter - + if not os.path.exists(exp_path): os.mkdir(exp_path) - writer = SummaryWriter(exp_path+ '/' + exp_name) - log_file = exp_path + '/' + exp_name + '/' + exp_name + '.txt' - - cfg_file = open('./config.py',"r") + writer = SummaryWriter(exp_path + "/" + exp_name) + log_file = exp_path + "/" + exp_name + "/" + exp_name + ".txt" + + cfg_file = open("./config.py", "r") 
cfg_lines = cfg_file.readlines() - - with open(log_file, 'a') as f: - f.write(''.join(cfg_lines) + '\n\n\n\n') - if not resume: - copy_cur_env(work_dir, exp_path+ '/' + exp_name + '/code', exception) + with open(log_file, "a") as f: + f.write("".join(cfg_lines) + "\n\n\n\n") + if not resume: + copy_cur_env(work_dir, exp_path + "/" + exp_name + "/code", exception) return writer, log_file - def logger_for_CMTL(exp_path, exp_name, work_dir, exception, resume=False): - + if not os.path.exists(exp_path): os.mkdir(exp_path) - if not os.path.exists(exp_path+ '/' + exp_name): - os.mkdir(exp_path+ '/' + exp_name) - log_file = exp_path + '/' + exp_name + '/' + exp_name + '.txt' - - cfg_file = open('./config.py',"r") + if not os.path.exists(exp_path + "/" + exp_name): + os.mkdir(exp_path + "/" + exp_name) + log_file = exp_path + "/" + exp_name + "/" + exp_name + ".txt" + + cfg_file = open("./config.py", "r") cfg_lines = cfg_file.readlines() - - with open(log_file, 'a') as f: - f.write(''.join(cfg_lines) + '\n\n\n\n') - if not resume: - copy_cur_env(work_dir, exp_path+ '/' + exp_name + '/code', exception) + with open(log_file, "a") as f: + f.write("".join(cfg_lines) + "\n\n\n\n") + if not resume: + copy_cur_env(work_dir, exp_path + "/" + exp_name + "/code", exception) return log_file -def logger_txt(log_file,epoch,scores): + +def logger_txt(log_file, epoch, scores): mae, mse, loss = scores - snapshot_name = 'all_ep_%d_mae_%.1f_mse_%.1f' % (epoch + 1, mae, mse) + snapshot_name = "all_ep_%d_mae_%.1f_mse_%.1f" % (epoch + 1, mae, mse) # pdb.set_trace() - with open(log_file, 'a') as f: - f.write('='*15 + '+'*15 + '='*15 + '\n\n') - f.write(snapshot_name + '\n') - f.write(' [mae %.2f mse %.2f], [val loss %.4f]\n' % (mae, mse, loss)) - f.write('='*15 + '+'*15 + '='*15 + '\n\n') - - - + with open(log_file, "a") as f: + f.write("=" * 15 + "+" * 15 + "=" * 15 + "\n\n") + f.write(snapshot_name + "\n") + f.write(" [mae %.2f mse %.2f], [val loss %.4f]\n" % (mae, mse, loss)) + f.write("=" * 
15 + "+" * 15 + "=" * 15 + "\n\n") def vis_results(exp_name, epoch, writer, restore, img, pred_map, gt_map): @@ -124,112 +120,159 @@ def vis_results(exp_name, epoch, writer, restore, img, pred_map, gt_map): pil_to_tensor = standard_transforms.ToTensor() x = [] - + for idx, tensor in enumerate(zip(img.cpu().data, pred_map, gt_map)): - if idx>1:# show only one group + if idx > 1: # show only one group break pil_input = restore(tensor[0]) - pil_output = torch.from_numpy(tensor[1]/(tensor[2].max()+1e-10)).repeat(3,1,1) - pil_label = torch.from_numpy(tensor[2]/(tensor[2].max()+1e-10)).repeat(3,1,1) - x.extend([pil_to_tensor(pil_input.convert('RGB')), pil_label, pil_output]) + pil_output = torch.from_numpy(tensor[1] / (tensor[2].max() + 1e-10)).repeat( + 3, 1, 1 + ) + pil_label = torch.from_numpy(tensor[2] / (tensor[2].max() + 1e-10)).repeat( + 3, 1, 1 + ) + x.extend([pil_to_tensor(pil_input.convert("RGB")), pil_label, pil_output]) x = torch.stack(x, 0) x = vutils.make_grid(x, nrow=3, padding=5) - x = (x.numpy()*255).astype(np.uint8) + x = (x.numpy() * 255).astype(np.uint8) - writer.add_image(exp_name + '_epoch_' + str(epoch+1), x) + writer.add_image(exp_name + "_epoch_" + str(epoch + 1), x) - -def print_summary(exp_name,scores,train_record): +def print_summary(exp_name, scores, train_record): mae, mse, loss = scores - print( '='*50 ) - print( exp_name ) - print( ' '+ '-'*20 ) - print( ' [mae %.2f mse %.2f], [val loss %.4f]' % (mae, mse, loss) ) - print( ' '+ '-'*20 ) - print( '[best] [model: %s] , [mae %.2f], [mse %.2f]' % (train_record['best_model_name'],\ - train_record['best_mae'],\ - train_record['best_mse']) ) - print( '='*50) - -def print_WE_summary(log_txt,epoch,scores,train_record,c_maes): + print("=" * 50) + print(exp_name) + print(" " + "-" * 20) + print(" [mae %.2f mse %.2f], [val loss %.4f]" % (mae, mse, loss)) + print(" " + "-" * 20) + print( + "[best] [model: %s] , [mae %.2f], [mse %.2f]" + % ( + train_record["best_model_name"], + train_record["best_mae"], 
+ train_record["best_mse"], + ) + ) + print("=" * 50) + + +def print_WE_summary(log_txt, epoch, scores, train_record, c_maes): mae, mse, loss = scores # pdb.set_trace() - with open(log_txt, 'a') as f: - f.write('='*15 + '+'*15 + '='*15 + '\n') - f.write(str(epoch) + '\n\n') - f.write(' [mae %.4f], [val loss %.4f]\n\n' % (mae, loss)) - f.write(' list: ' + str(np.transpose(c_maes.avg)) + '\n') - - - f.write('='*15 + '+'*15 + '='*15 + '\n\n') - - print( '='*50 ) - print( ' '+ '-'*20 ) - print( ' [mae %.2f mse %.2f], [val loss %.4f]' % (mae, mse, loss) ) - print( ' '+ '-'*20 ) - print( '[best] [model: %s] , [mae %.2f], [mse %.2f]' % (train_record['best_model_name'],\ - train_record['best_mae'],\ - train_record['best_mse']) ) - print( '='*50 ) - - -def print_GCC_summary(log_txt,epoch, scores,train_record,c_maes,c_mses): + with open(log_txt, "a") as f: + f.write("=" * 15 + "+" * 15 + "=" * 15 + "\n") + f.write(str(epoch) + "\n\n") + f.write(" [mae %.4f], [val loss %.4f]\n\n" % (mae, loss)) + f.write(" list: " + str(np.transpose(c_maes.avg)) + "\n") + + f.write("=" * 15 + "+" * 15 + "=" * 15 + "\n\n") + + print("=" * 50) + print(" " + "-" * 20) + print(" [mae %.2f mse %.2f], [val loss %.4f]" % (mae, mse, loss)) + print(" " + "-" * 20) + print( + "[best] [model: %s] , [mae %.2f], [mse %.2f]" + % ( + train_record["best_model_name"], + train_record["best_mae"], + train_record["best_mse"], + ) + ) + print("=" * 50) + + +def print_GCC_summary(log_txt, epoch, scores, train_record, c_maes, c_mses): mae, mse, loss = scores - c_mses['level'] = np.sqrt(c_mses['level'].avg) - c_mses['time'] = np.sqrt(c_mses['time'].avg) - c_mses['weather'] = np.sqrt(c_mses['weather'].avg) - with open(log_txt, 'a') as f: - f.write('='*15 + '+'*15 + '='*15 + '\n') - f.write(str(epoch) + '\n\n') - f.write(' [mae %.4f mse %.4f], [val loss %.4f]\n\n' % (mae, mse, loss)) - f.write(' [level: mae %.4f mse %.4f]\n' % (np.average(c_maes['level'].avg), np.average(c_mses['level']))) - f.write(' list: ' + 
str(np.transpose(c_maes['level'].avg)) + '\n') - f.write(' list: ' + str(np.transpose(c_mses['level'])) + '\n\n') - - f.write(' [time: mae %.4f mse %.4f]\n' % (np.average(c_maes['time'].avg), np.average(c_mses['time']))) - f.write(' list: ' + str(np.transpose(c_maes['time'].avg)) + '\n') - f.write(' list: ' + str(np.transpose(c_mses['time'])) + '\n\n') - - f.write(' [weather: mae %.4f mse %.4f]\n' % (np.average(c_maes['weather'].avg), np.average(c_mses['weather']))) - f.write(' list: ' + str(np.transpose(c_maes['weather'].avg)) + '\n') - f.write(' list: ' + str(np.transpose(c_mses['weather']))+ '\n\n') - - f.write('='*15 + '+'*15 + '='*15 + '\n\n') - - print( '='*50 ) - print( ' '+ '-'*20 ) - print( ' [mae %.2f mse %.2f], [val loss %.4f]' % (mae, mse, loss) ) - print( ' '+ '-'*20 ) - print( '[best] [model: %s] , [mae %.2f], [mse %.2f]' % (train_record['best_model_name'],\ - train_record['best_mae'],\ - train_record['best_mse']) ) - print( '='*50 ) - - -def update_model(net,optimizer,scheduler,epoch,i_tb,exp_path,exp_name,scores,train_record,log_file=None): + c_mses["level"] = np.sqrt(c_mses["level"].avg) + c_mses["time"] = np.sqrt(c_mses["time"].avg) + c_mses["weather"] = np.sqrt(c_mses["weather"].avg) + with open(log_txt, "a") as f: + f.write("=" * 15 + "+" * 15 + "=" * 15 + "\n") + f.write(str(epoch) + "\n\n") + f.write(" [mae %.4f mse %.4f], [val loss %.4f]\n\n" % (mae, mse, loss)) + f.write( + " [level: mae %.4f mse %.4f]\n" + % (np.average(c_maes["level"].avg), np.average(c_mses["level"])) + ) + f.write(" list: " + str(np.transpose(c_maes["level"].avg)) + "\n") + f.write(" list: " + str(np.transpose(c_mses["level"])) + "\n\n") + + f.write( + " [time: mae %.4f mse %.4f]\n" + % (np.average(c_maes["time"].avg), np.average(c_mses["time"])) + ) + f.write(" list: " + str(np.transpose(c_maes["time"].avg)) + "\n") + f.write(" list: " + str(np.transpose(c_mses["time"])) + "\n\n") + + f.write( + " [weather: mae %.4f mse %.4f]\n" + % (np.average(c_maes["weather"].avg), 
np.average(c_mses["weather"])) + ) + f.write(" list: " + str(np.transpose(c_maes["weather"].avg)) + "\n") + f.write(" list: " + str(np.transpose(c_mses["weather"])) + "\n\n") + + f.write("=" * 15 + "+" * 15 + "=" * 15 + "\n\n") + + print("=" * 50) + print(" " + "-" * 20) + print(" [mae %.2f mse %.2f], [val loss %.4f]" % (mae, mse, loss)) + print(" " + "-" * 20) + print( + "[best] [model: %s] , [mae %.2f], [mse %.2f]" + % ( + train_record["best_model_name"], + train_record["best_mae"], + train_record["best_mse"], + ) + ) + print("=" * 50) + + +def update_model( + net, + optimizer, + scheduler, + epoch, + i_tb, + exp_path, + exp_name, + scores, + train_record, + log_file=None, +): mae, mse, loss = scores - snapshot_name = 'all_ep_%d_mae_%.1f_mse_%.1f' % (epoch + 1, mae, mse) + snapshot_name = "all_ep_%d_mae_%.1f_mse_%.1f" % (epoch + 1, mae, mse) - if mae < train_record['best_mae'] or mse < train_record['best_mse']: - train_record['best_model_name'] = snapshot_name + if mae < train_record["best_mae"] or mse < train_record["best_mse"]: + train_record["best_model_name"] = snapshot_name if log_file is not None: - logger_txt(log_file,epoch,scores) + logger_txt(log_file, epoch, scores) to_saved_weight = net.state_dict() - torch.save(to_saved_weight, os.path.join(exp_path, exp_name, snapshot_name + '.pth')) - - if mae < train_record['best_mae']: - train_record['best_mae'] = mae - if mse < train_record['best_mse']: - train_record['best_mse'] = mse - - latest_state = {'train_record':train_record, 'net':net.state_dict(), 'optimizer':optimizer.state_dict(),\ - 'scheduler':scheduler.state_dict(), 'epoch': epoch, 'i_tb':i_tb, 'exp_path':exp_path, \ - 'exp_name':exp_name} - - torch.save(latest_state,os.path.join(exp_path, exp_name, 'latest_state.pth')) + torch.save( + to_saved_weight, os.path.join(exp_path, exp_name, snapshot_name + ".pth") + ) + + if mae < train_record["best_mae"]: + train_record["best_mae"] = mae + if mse < train_record["best_mse"]: + train_record["best_mse"] = 
mse + + latest_state = { + "train_record": train_record, + "net": net.state_dict(), + "optimizer": optimizer.state_dict(), + "scheduler": scheduler.state_dict(), + "epoch": epoch, + "i_tb": i_tb, + "exp_path": exp_path, + "exp_name": exp_name, + } + + torch.save(latest_state, os.path.join(exp_path, exp_name, "latest_state.pth")) return train_record @@ -241,16 +284,13 @@ def copy_cur_env(work_dir, dst_dir, exception): for filename in os.listdir(work_dir): - file = os.path.join(work_dir,filename) - dst_file = os.path.join(dst_dir,filename) - + file = os.path.join(work_dir, filename) + dst_file = os.path.join(dst_dir, filename) if os.path.isdir(file) and exception not in filename: shutil.copytree(file, dst_file) elif os.path.isfile(file): - shutil.copyfile(file,dst_file) - - + shutil.copyfile(file, dst_file) class AverageMeter(object): @@ -271,10 +311,11 @@ def update(self, cur_val): self.count += 1 self.avg = self.sum / self.count + class AverageCategoryMeter(object): """Computes and stores the average and current value""" - def __init__(self,num_class): + def __init__(self, num_class): self.num_class = num_class self.reset() @@ -293,12 +334,13 @@ def update(self, cur_val, class_id): class Timer(object): """A simple timer.""" + def __init__(self): - self.total_time = 0. + self.total_time = 0.0 self.calls = 0 - self.start_time = 0. - self.diff = 0. - self.average_time = 0. 
+ self.start_time = 0.0 + self.diff = 0.0 + self.average_time = 0.0 def tic(self): # using time.time instead of time.clock because time time.clock @@ -314,11 +356,3 @@ def toc(self, average=True): return self.average_time else: return self.diff - - - - - - - - diff --git a/models/CC.py b/models/CC.py index cc7f2a6..c2c404e 100644 --- a/models/CC.py +++ b/models/CC.py @@ -2,49 +2,38 @@ import torch.nn as nn import torch.nn.functional as F import pdb +import imp + class CrowdCounter(nn.Module): - def __init__(self,gpus,model_name): - super(CrowdCounter, self).__init__() - - if model_name == 'AlexNet': - from .SCC_Model.AlexNet import AlexNet as net - elif model_name == 'VGG': - from .SCC_Model.VGG import VGG as net - elif model_name == 'VGG_DECODER': - from .SCC_Model.VGG_decoder import VGG_decoder as net - elif model_name == 'MCNN': - from .SCC_Model.MCNN import MCNN as net - elif model_name == 'CSRNet': - from .SCC_Model.CSRNet import CSRNet as net - elif model_name == 'Res50': - from .SCC_Model.Res50 import Res50 as net - elif model_name == 'Res101': - from .SCC_Model.Res101 import Res101 as net - elif model_name == 'Res101_SFCN': - from .SCC_Model.Res101_SFCN import Res101_SFCN as net + def __init__(self, gpus, model_name): + super(CrowdCounter, self).__init__() + + net = getattr( + imp.load_source("network_src", "models/SCC_Model/" + model_name + ".py"), + model_name, + ) self.CCN = net() - if len(gpus)>1: + if len(gpus) > 1: self.CCN = torch.nn.DataParallel(self.CCN, device_ids=gpus).cuda() else: - self.CCN=self.CCN.cuda() + self.CCN = self.CCN.cuda() self.loss_mse_fn = nn.MSELoss().cuda() - + @property def loss(self): return self.loss_mse - - def forward(self, img, gt_map): - density_map = self.CCN(img) - self.loss_mse= self.build_loss(density_map.squeeze(), gt_map.squeeze()) + + def forward(self, img, gt_map): + density_map = self.CCN(img) + self.loss_mse = self.build_loss(density_map.squeeze(), gt_map.squeeze()) return density_map - + def build_loss(self, 
density_map, gt_data): - loss_mse = self.loss_mse_fn(density_map, gt_data) + loss_mse = self.loss_mse_fn(density_map, gt_data) return loss_mse - def test_forward(self, img): - density_map = self.CCN(img) + def test_forward(self, img): + density_map = self.CCN(img) return density_map - diff --git a/models/M2T2OCC.py b/models/M2T2OCC.py index 00714b6..2175331 100644 --- a/models/M2T2OCC.py +++ b/models/M2T2OCC.py @@ -8,10 +8,10 @@ class CrowdCounter(nn.Module): - def __init__(self, gpus, model_name,loss_1_fn,loss_2_fn): + def __init__(self, gpus, model_name, loss_1_fn, loss_2_fn): super(CrowdCounter, self).__init__() - if model_name == 'CMTL': - from M2T2OCC_Model.CMTL import CMTL as net + if model_name == "CMTL": + from M2T2OCC_Model.CMTL import CMTL as net self.CCN = net() if len(gpus) > 1: @@ -23,17 +23,18 @@ def __init__(self, gpus, model_name,loss_1_fn,loss_2_fn): @property def loss(self): - return self.loss_mse, self.cross_entropy*cfg.LAMBDA_1 - + return self.loss_mse, self.cross_entropy * cfg.LAMBDA_1 def forward(self, img, gt_map=None, gt_cls_label=None): density_map, density_cls_score = self.CCN(img) # pdb.set_trace() - density_cls_prob = F.softmax(density_cls_score,dim=1) + density_cls_prob = F.softmax(density_cls_score, dim=1) - self.loss_mse, self.cross_entropy = self.build_loss(density_map.squeeze(), gt_map.squeeze(), density_cls_prob, gt_cls_label) + self.loss_mse, self.cross_entropy = self.build_loss( + density_map.squeeze(), gt_map.squeeze(), density_cls_prob, gt_cls_label + ) return density_map def build_loss(self, density_map, gt_data, density_cls_score, gt_cls_label): @@ -45,4 +46,3 @@ def build_loss(self, density_map, gt_data, density_cls_score, gt_cls_label): def test_forward(self, img): density_map, density_cls_score = self.CCN(img) return density_map - diff --git a/models/M2T2OCC_Model/CMTL.py b/models/M2T2OCC_Model/CMTL.py index 2c39e26..762569a 100644 --- a/models/M2T2OCC_Model/CMTL.py +++ b/models/M2T2OCC_Model/CMTL.py @@ -11,53 +11,65 @@ 
class CMTL(nn.Module): - ''' + """ Implementation of CNN-based Cascaded Multi-task Learning of High-level Prior and Density Estimation for Crowd Counting (Sindagi et al.) - ''' + """ def __init__(self, bn=False, num_classes=10): super(CMTL, self).__init__() self.num_classes = num_classes - self.base_layer = nn.Sequential(Conv2d(3, 16, 9, same_padding=True, NL='prelu', bn=bn), - Conv2d(16, 32, 7, same_padding=True, NL='prelu', bn=bn)) + self.base_layer = nn.Sequential( + Conv2d(3, 16, 9, same_padding=True, NL="prelu", bn=bn), + Conv2d(16, 32, 7, same_padding=True, NL="prelu", bn=bn), + ) - self.hl_prior_1 = nn.Sequential(Conv2d(32, 16, 9, same_padding=True, NL='prelu', bn=bn), - nn.MaxPool2d(2), - Conv2d(16, 32, 7, same_padding=True, NL='prelu', bn=bn), - nn.MaxPool2d(2), - Conv2d(32, 16, 7, same_padding=True, NL='prelu', bn=bn), - Conv2d(16, 8, 7, same_padding=True, NL='prelu', bn=bn)) + self.hl_prior_1 = nn.Sequential( + Conv2d(32, 16, 9, same_padding=True, NL="prelu", bn=bn), + nn.MaxPool2d(2), + Conv2d(16, 32, 7, same_padding=True, NL="prelu", bn=bn), + nn.MaxPool2d(2), + Conv2d(32, 16, 7, same_padding=True, NL="prelu", bn=bn), + Conv2d(16, 8, 7, same_padding=True, NL="prelu", bn=bn), + ) - self.hl_prior_2 = nn.Sequential(nn.AdaptiveMaxPool2d((32, 32)), - Conv2d(8, 4, 1, same_padding=True, NL='prelu', bn=bn)) + self.hl_prior_2 = nn.Sequential( + nn.AdaptiveMaxPool2d((32, 32)), + Conv2d(8, 4, 1, same_padding=True, NL="prelu", bn=bn), + ) - self.hl_prior_fc1 = FC(4 * 1024, 512, NL='prelu') - self.hl_prior_fc2 = FC(512, 256, NL='prelu') - self.hl_prior_fc3 = FC(256, self.num_classes, NL='prelu') + self.hl_prior_fc1 = FC(4 * 1024, 512, NL="prelu") + self.hl_prior_fc2 = FC(512, 256, NL="prelu") + self.hl_prior_fc3 = FC(256, self.num_classes, NL="prelu") - self.de_stage_1 = nn.Sequential(Conv2d(32, 20, 7, same_padding=True, NL='prelu', bn=bn), - nn.MaxPool2d(2), - Conv2d(20, 40, 5, same_padding=True, NL='prelu', bn=bn), - nn.MaxPool2d(2), - Conv2d(40, 20, 5, 
same_padding=True, NL='prelu', bn=bn), - Conv2d(20, 10, 5, same_padding=True, NL='prelu', bn=bn)) + self.de_stage_1 = nn.Sequential( + Conv2d(32, 20, 7, same_padding=True, NL="prelu", bn=bn), + nn.MaxPool2d(2), + Conv2d(20, 40, 5, same_padding=True, NL="prelu", bn=bn), + nn.MaxPool2d(2), + Conv2d(40, 20, 5, same_padding=True, NL="prelu", bn=bn), + Conv2d(20, 10, 5, same_padding=True, NL="prelu", bn=bn), + ) - self.de_stage_2 = nn.Sequential(Conv2d(18, 24, 3, same_padding=True, NL='prelu', bn=bn), - Conv2d(24, 32, 3, same_padding=True, NL='prelu', bn=bn), - nn.ConvTranspose2d(32, 16, 4, stride=2, padding=1, output_padding=0, bias=True), - nn.PReLU(), - nn.ConvTranspose2d(16, 8, 4, stride=2, padding=1, output_padding=0, bias=True), - nn.PReLU(), - Conv2d(8, 1, 1, same_padding=True, NL='relu', bn=bn)) + self.de_stage_2 = nn.Sequential( + Conv2d(18, 24, 3, same_padding=True, NL="prelu", bn=bn), + Conv2d(24, 32, 3, same_padding=True, NL="prelu", bn=bn), + nn.ConvTranspose2d( + 32, 16, 4, stride=2, padding=1, output_padding=0, bias=True + ), + nn.PReLU(), + nn.ConvTranspose2d( + 16, 8, 4, stride=2, padding=1, output_padding=0, bias=True + ), + nn.PReLU(), + Conv2d(8, 1, 1, same_padding=True, NL="relu", bn=bn), + ) # weights_normal_init(self.base_layer, self.hl_prior_1, self.hl_prior_2, self.hl_prior_fc1, self.hl_prior_fc2, \ # self.hl_prior_fc3, self.de_stage_1, self.de_stage_2) initialize_weights(self.modules()) - - def forward(self, im_data): x_base = self.base_layer(im_data) x_hlp1 = self.hl_prior_1(x_base) @@ -71,4 +83,4 @@ def forward(self, im_data): x_den = self.de_stage_1(x_base) x_den = torch.cat((x_hlp1, x_den), 1) x_den = self.de_stage_2(x_den) - return x_den, x_cls \ No newline at end of file + return x_den, x_cls diff --git a/models/M2TCC.py b/models/M2TCC.py index a804b6a..ea7dcde 100644 --- a/models/M2TCC.py +++ b/models/M2TCC.py @@ -6,34 +6,31 @@ class CrowdCounter(nn.Module): - def __init__(self,gpus,model_name,loss_1_fn,loss_2_fn): - super(CrowdCounter, 
self).__init__() - - if model_name == 'SANet': - from M2TCC_Model.SANet import SANet as net + def __init__(self, gpus, model_name, loss_1_fn, loss_2_fn): + super(CrowdCounter, self).__init__() + if model_name == "SANet": + from M2TCC_Model.SANet import SANet as net self.CCN = net() - if len(gpus)>1: + if len(gpus) > 1: self.CCN = torch.nn.DataParallel(self.CCN, device_ids=gpus).cuda() else: - self.CCN=self.CCN.cuda() + self.CCN = self.CCN.cuda() self.loss_1_fn = loss_1_fn.cuda() self.loss_2_fn = loss_2_fn.cuda() - + @property def loss(self): - return self.loss_1, self.loss_2*cfg.LAMBDA_1 - - def forward(self, img, gt_map): - density_map = self.CCN(img) - self.loss_1= self.loss_1_fn(density_map.squeeze(), gt_map.squeeze()) - self.loss_2= 1 - self.loss_2_fn(density_map, gt_map[:,None,:,:]) - - return density_map + return self.loss_1, self.loss_2 * cfg.LAMBDA_1 + def forward(self, img, gt_map): + density_map = self.CCN(img) + self.loss_1 = self.loss_1_fn(density_map.squeeze(), gt_map.squeeze()) + self.loss_2 = 1 - self.loss_2_fn(density_map, gt_map[:, None, :, :]) - def test_forward(self, img): - density_map = self.CCN(img) return density_map + def test_forward(self, img): + density_map = self.CCN(img) + return density_map diff --git a/models/M2TCC_Model/SANet.py b/models/M2TCC_Model/SANet.py index 9a801a0..df66e99 100644 --- a/models/M2TCC_Model/SANet.py +++ b/models/M2TCC_Model/SANet.py @@ -25,7 +25,9 @@ class BasicDeconv(nn.Module): def __init__(self, in_channels, out_channels, kernel_size, stride=1, use_bn=False): super(BasicDeconv, self).__init__() self.use_bn = use_bn - self.tconv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride=stride, bias=not self.use_bn) + self.tconv = nn.ConvTranspose2d( + in_channels, out_channels, kernel_size, stride=stride, bias=not self.use_bn + ) self.bn = nn.InstanceNorm2d(out_channels, affine=True) if self.use_bn else None def forward(self, x): @@ -40,15 +42,19 @@ class SAModule_Head(nn.Module): def __init__(self, 
in_channels, out_channels, use_bn): super(SAModule_Head, self).__init__() branch_out = out_channels // 4 - self.branch1x1 = BasicConv(in_channels, branch_out, use_bn=use_bn, - kernel_size=1) - self.branch3x3 = BasicConv(in_channels, branch_out, use_bn=use_bn, - kernel_size=3, padding=1) - self.branch5x5 = BasicConv(in_channels, branch_out, use_bn=use_bn, - kernel_size=5, padding=2) - self.branch7x7 = BasicConv(in_channels, branch_out, use_bn=use_bn, - kernel_size=7, padding=3) - + self.branch1x1 = BasicConv( + in_channels, branch_out, use_bn=use_bn, kernel_size=1 + ) + self.branch3x3 = BasicConv( + in_channels, branch_out, use_bn=use_bn, kernel_size=3, padding=1 + ) + self.branch5x5 = BasicConv( + in_channels, branch_out, use_bn=use_bn, kernel_size=5, padding=2 + ) + self.branch7x7 = BasicConv( + in_channels, branch_out, use_bn=use_bn, kernel_size=7, padding=3 + ) + def forward(self, x): branch1x1 = self.branch1x1(x) branch3x3 = self.branch3x3(x) @@ -62,27 +68,28 @@ class SAModule(nn.Module): def __init__(self, in_channels, out_channels, use_bn): super(SAModule, self).__init__() branch_out = out_channels // 4 - self.branch1x1 = BasicConv(in_channels, branch_out, use_bn=use_bn, - kernel_size=1) + self.branch1x1 = BasicConv( + in_channels, branch_out, use_bn=use_bn, kernel_size=1 + ) self.branch3x3 = nn.Sequential( - BasicConv(in_channels, 2*branch_out, use_bn=use_bn, - kernel_size=1), - BasicConv(2*branch_out, branch_out, use_bn=use_bn, - kernel_size=3, padding=1), - ) + BasicConv(in_channels, 2 * branch_out, use_bn=use_bn, kernel_size=1), + BasicConv( + 2 * branch_out, branch_out, use_bn=use_bn, kernel_size=3, padding=1 + ), + ) self.branch5x5 = nn.Sequential( - BasicConv(in_channels, 2*branch_out, use_bn=use_bn, - kernel_size=1), - BasicConv(2*branch_out, branch_out, use_bn=use_bn, - kernel_size=5, padding=2), - ) + BasicConv(in_channels, 2 * branch_out, use_bn=use_bn, kernel_size=1), + BasicConv( + 2 * branch_out, branch_out, use_bn=use_bn, kernel_size=5, 
padding=2 + ), + ) self.branch7x7 = nn.Sequential( - BasicConv(in_channels, 2*branch_out, use_bn=use_bn, - kernel_size=1), - BasicConv(2*branch_out, branch_out, use_bn=use_bn, - kernel_size=7, padding=3), - ) - + BasicConv(in_channels, 2 * branch_out, use_bn=use_bn, kernel_size=1), + BasicConv( + 2 * branch_out, branch_out, use_bn=use_bn, kernel_size=7, padding=3 + ), + ) + def forward(self, x): branch1x1 = self.branch1x1(x) branch3x3 = self.branch3x3(x) @@ -108,21 +115,21 @@ def __init__(self, gray_input=False, use_bn=True): SAModule(128, 128, use_bn), nn.MaxPool2d(2, 2), SAModule(128, 128, use_bn), - ) + ) self.decoder = nn.Sequential( BasicConv(128, 64, use_bn=use_bn, kernel_size=9, padding=4), BasicDeconv(64, 64, 2, stride=2, use_bn=use_bn), BasicConv(64, 32, use_bn=use_bn, kernel_size=7, padding=3), BasicDeconv(32, 32, 2, stride=2, use_bn=use_bn), - BasicConv(32, 16, use_bn=use_bn, kernel_size=5, padding=2), + BasicConv(32, 16, use_bn=use_bn, kernel_size=5, padding=2), BasicDeconv(16, 16, 2, stride=2, use_bn=use_bn), - BasicConv(16, 16, use_bn=use_bn, kernel_size=3, padding=1), + BasicConv(16, 16, use_bn=use_bn, kernel_size=3, padding=1), BasicConv(16, 1, use_bn=False, kernel_size=1), - ) + ) initialize_weights(self.modules()) def forward(self, x): features = self.encoder(x) out = self.decoder(features) - return out \ No newline at end of file + return out diff --git a/models/SCC_Model/AlexNet.py b/models/SCC_Model/AlexNet.py index 5a033c0..51ce2da 100644 --- a/models/SCC_Model/AlexNet.py +++ b/models/SCC_Model/AlexNet.py @@ -8,6 +8,7 @@ # model_path = '../PyTorch_Pretrained/alexnet-owt-4df8aa71.pth' + class AlexNet(nn.Module): def __init__(self, pretrained=True): super(AlexNet, self).__init__() @@ -15,28 +16,32 @@ def __init__(self, pretrained=True): # if pretrained: # alex.load_state_dict(torch.load(model_path)) features = list(alex.features.children()) - - self.layer1 = nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=4) # original padding is 4 - 
self.layer1plus = nn.Sequential(nn.ReLU(inplace=True), - nn.MaxPool2d(kernel_size=3, stride=2)) - self.layer2 = nn.Conv2d(64, 192, kernel_size=5, padding=3) # original padding is 2 - self.layer2plus_to_5 = nn.Sequential(*features[4:12]) - self.de_pred = nn.Sequential(Conv2d(256, 128, 1, same_padding=True, NL='relu'), - Conv2d(128, 1, 1, same_padding=True, NL='relu')) + self.layer1 = nn.Conv2d( + 3, 64, kernel_size=11, stride=4, padding=4 + ) # original padding is 4 + self.layer1plus = nn.Sequential( + nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=3, stride=2) + ) + self.layer2 = nn.Conv2d( + 64, 192, kernel_size=5, padding=3 + ) # original padding is 2 + self.layer2plus_to_5 = nn.Sequential(*features[4:12]) + self.de_pred = nn.Sequential( + Conv2d(256, 128, 1, same_padding=True, NL="relu"), + Conv2d(128, 1, 1, same_padding=True, NL="relu"), + ) self.layer1.load_state_dict(alex.features[0].state_dict()) self.layer2.load_state_dict(alex.features[3].state_dict()) - - def forward(self, x): - x = self.layer1(x) - x = self.layer1plus(x) + x = self.layer1(x) + x = self.layer1plus(x) x = self.layer2(x) - x = self.layer2plus_to_5(x) + x = self.layer2plus_to_5(x) x = self.de_pred(x) - x = F.upsample(x,scale_factor=16) + x = F.upsample(x, scale_factor=16) - return x \ No newline at end of file + return x diff --git a/models/SCC_Model/CSRNet.py b/models/SCC_Model/CSRNet.py index c6d24b0..4252d1e 100644 --- a/models/SCC_Model/CSRNet.py +++ b/models/SCC_Model/CSRNet.py @@ -3,25 +3,28 @@ from torchvision import models import torch.nn.functional as F + class CSRNet(nn.Module): def __init__(self, load_weights=False): super(CSRNet, self).__init__() self.seen = 0 - self.frontend_feat = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512] - self.backend_feat = [512, 512, 512,256,128,64] + self.frontend_feat = [64, 64, "M", 128, 128, "M", 256, 256, 256, "M", 512, 512, 512] + self.backend_feat = [512, 512, 512, 256, 128, 64] self.frontend = make_layers(self.frontend_feat) - 
self.backend = make_layers(self.backend_feat,in_channels = 512,dilation = True) + self.backend = make_layers(self.backend_feat, in_channels=512, dilation=True) self.output_layer = nn.Conv2d(64, 1, kernel_size=1) if not load_weights: - mod = models.vgg16(pretrained = True) + mod = models.vgg16(pretrained=True) self._initialize_weights() self.frontend.load_state_dict(mod.features[0:23].state_dict()) - def forward(self,x): + + def forward(self, x): x = self.frontend(x) x = self.backend(x) x = self.output_layer(x) - x = F.upsample(x,scale_factor=8) + x = F.upsample(x, scale_factor=8) return x + def _initialize_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d): @@ -31,22 +34,24 @@ def _initialize_weights(self): elif isinstance(m, nn.BatchNorm2d): nn.init.constant_(m.weight, 1) nn.init.constant_(m.bias, 0) - - -def make_layers(cfg, in_channels = 3,batch_norm=False,dilation = False): + + +def make_layers(cfg, in_channels=3, batch_norm=False, dilation=False): if dilation: d_rate = 2 else: d_rate = 1 layers = [] for v in cfg: - if v == 'M': + if v == "M": layers += [nn.MaxPool2d(kernel_size=2, stride=2)] else: - conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=d_rate,dilation = d_rate) + conv2d = nn.Conv2d( + in_channels, v, kernel_size=3, padding=d_rate, dilation=d_rate + ) if batch_norm: layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] else: layers += [conv2d, nn.ReLU(inplace=True)] in_channels = v - return nn.Sequential(*layers) + return nn.Sequential(*layers) diff --git a/models/SCC_Model/MCNN.py b/models/SCC_Model/MCNN.py index 57fca88..22fd556 100644 --- a/models/SCC_Model/MCNN.py +++ b/models/SCC_Model/MCNN.py @@ -4,45 +4,52 @@ import torch.nn.functional as F from misc.utils import * + class MCNN(nn.Module): - ''' - Multi-column CNN + """ + Multi-column CNN -Implementation of Single Image Crowd Counting via Multi-column CNN (Zhang et al.) 
- ''' - + """ + def __init__(self, bn=False): super(MCNN, self).__init__() - - self.branch1 = nn.Sequential(Conv2d( 3, 16, 9, same_padding=True, bn=bn), - nn.MaxPool2d(2), - Conv2d(16, 32, 7, same_padding=True, bn=bn), - nn.MaxPool2d(2), - Conv2d(32, 16, 7, same_padding=True, bn=bn), - Conv2d(16, 8, 7, same_padding=True, bn=bn)) - - self.branch2 = nn.Sequential(Conv2d( 3, 20, 7, same_padding=True, bn=bn), - nn.MaxPool2d(2), - Conv2d(20, 40, 5, same_padding=True, bn=bn), - nn.MaxPool2d(2), - Conv2d(40, 20, 5, same_padding=True, bn=bn), - Conv2d(20, 10, 5, same_padding=True, bn=bn)) - - self.branch3 = nn.Sequential(Conv2d( 3, 24, 5, same_padding=True, bn=bn), - nn.MaxPool2d(2), - Conv2d(24, 48, 3, same_padding=True, bn=bn), - nn.MaxPool2d(2), - Conv2d(48, 24, 3, same_padding=True, bn=bn), - Conv2d(24, 12, 3, same_padding=True, bn=bn)) - - self.fuse = nn.Sequential(Conv2d( 30, 1, 1, same_padding=True, bn=bn)) - - initialize_weights(self.modules()) - + + self.branch1 = nn.Sequential( + Conv2d(3, 16, 9, same_padding=True, bn=bn), + nn.MaxPool2d(2), + Conv2d(16, 32, 7, same_padding=True, bn=bn), + nn.MaxPool2d(2), + Conv2d(32, 16, 7, same_padding=True, bn=bn), + Conv2d(16, 8, 7, same_padding=True, bn=bn), + ) + + self.branch2 = nn.Sequential( + Conv2d(3, 20, 7, same_padding=True, bn=bn), + nn.MaxPool2d(2), + Conv2d(20, 40, 5, same_padding=True, bn=bn), + nn.MaxPool2d(2), + Conv2d(40, 20, 5, same_padding=True, bn=bn), + Conv2d(20, 10, 5, same_padding=True, bn=bn), + ) + + self.branch3 = nn.Sequential( + Conv2d(3, 24, 5, same_padding=True, bn=bn), + nn.MaxPool2d(2), + Conv2d(24, 48, 3, same_padding=True, bn=bn), + nn.MaxPool2d(2), + Conv2d(48, 24, 3, same_padding=True, bn=bn), + Conv2d(24, 12, 3, same_padding=True, bn=bn), + ) + + self.fuse = nn.Sequential(Conv2d(30, 1, 1, same_padding=True, bn=bn)) + + initialize_weights(self.modules()) + def forward(self, im_data): x1 = self.branch1(im_data) x2 = self.branch2(im_data) x3 = self.branch3(im_data) - x = 
torch.cat((x1,x2,x3),1) + x = torch.cat((x1, x2, x3), 1) x = self.fuse(x) - x = F.upsample(x,scale_factor=4) + x = F.upsample(x, scale_factor=4) return x diff --git a/models/SCC_Model/Res101.py b/models/SCC_Model/Res101.py index 937b347..accd0a2 100644 --- a/models/SCC_Model/Res101.py +++ b/models/SCC_Model/Res101.py @@ -9,14 +9,17 @@ import pdb -model_path = '../PyTorch_Pretrained/resnet101-5d3b4d8f.pth' +model_path = "../PyTorch_Pretrained/resnet101-5d3b4d8f.pth" + class Res101(nn.Module): def __init__(self, pretrained=True): super(Res101, self).__init__() - self.de_pred = nn.Sequential(Conv2d(1024, 128, 1, same_padding=True, NL='relu'), - Conv2d(128, 1, 1, same_padding=True, NL='relu')) + self.de_pred = nn.Sequential( + Conv2d(1024, 128, 1, same_padding=True, NL="relu"), + Conv2d(128, 1, 1, same_padding=True, NL="relu"), + ) # initialize_weights(self.modules()) @@ -26,22 +29,18 @@ def __init__(self, pretrained=True): self.frontend = nn.Sequential( res.conv1, res.bn1, res.relu, res.maxpool, res.layer1, res.layer2 ) - self.own_reslayer_3 = make_res_layer(Bottleneck, 256, 23, stride=1) + self.own_reslayer_3 = make_res_layer(Bottleneck, 256, 23, stride=1) self.own_reslayer_3.load_state_dict(res.layer3.state_dict()) - - - - def forward(self,x): + def forward(self, x): - x = self.frontend(x) x = self.own_reslayer_3(x) x = self.de_pred(x) - x = F.upsample(x,scale_factor=8) + x = F.upsample(x, scale_factor=8) return x def _initialize_weights(self): @@ -52,17 +51,22 @@ def _initialize_weights(self): m.bias.data.fill_(0) elif isinstance(m, nn.BatchNorm2d): m.weight.fill_(1) - m.bias.data.fill_(0) + m.bias.data.fill_(0) def make_res_layer(block, planes, blocks, stride=1): downsample = None - inplanes=512 + inplanes = 512 if stride != 1 or inplanes != planes * block.expansion: downsample = nn.Sequential( - nn.Conv2d(inplanes, planes * block.expansion, - kernel_size=1, stride=stride, bias=False), + nn.Conv2d( + inplanes, + planes * block.expansion, + kernel_size=1, + 
stride=stride, + bias=False, + ), nn.BatchNorm2d(planes * block.expansion), ) @@ -72,7 +76,7 @@ def make_res_layer(block, planes, blocks, stride=1): for i in range(1, blocks): layers.append(block(inplanes, planes)) - return nn.Sequential(*layers) + return nn.Sequential(*layers) class Bottleneck(nn.Module): @@ -82,10 +86,13 @@ def __init__(self, inplanes, planes, stride=1, downsample=None): super(Bottleneck, self).__init__() self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) - self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, - padding=1, bias=False) + self.conv2 = nn.Conv2d( + planes, planes, kernel_size=3, stride=stride, padding=1, bias=False + ) self.bn2 = nn.BatchNorm2d(planes) - self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False) + self.conv3 = nn.Conv2d( + planes, planes * self.expansion, kernel_size=1, bias=False + ) self.bn3 = nn.BatchNorm2d(planes * self.expansion) self.relu = nn.ReLU(inplace=True) self.downsample = downsample @@ -111,4 +118,4 @@ def forward(self, x): out += residual out = self.relu(out) - return out \ No newline at end of file + return out diff --git a/models/SCC_Model/Res101_SFCN.py b/models/SCC_Model/Res101_SFCN.py index 804367b..832cd79 100644 --- a/models/SCC_Model/Res101_SFCN.py +++ b/models/SCC_Model/Res101_SFCN.py @@ -2,7 +2,7 @@ import torch from torchvision import models -from misc.layer import convDU,convLR +from misc.layer import convDU, convLR import torch.nn.functional as F from misc.utils import * @@ -11,20 +11,19 @@ # model_path = '../PyTorch_Pretrained/resnet101-5d3b4d8f.pth' + class Res101_SFCN(nn.Module): def __init__(self, pretrained=True): super(Res101_SFCN, self).__init__() self.seen = 0 - self.backend_feat = [512, 512, 512,256,128,64] + self.backend_feat = [512, 512, 512, 256, 128, 64] self.frontend = [] - - self.backend = make_layers(self.backend_feat,in_channels = 1024,dilation = True) - self.convDU = 
convDU(in_out_channels=64,kernel_size=(1,9)) - self.convLR = convLR(in_out_channels=64,kernel_size=(9,1)) - - self.output_layer = nn.Sequential(nn.Conv2d(64, 1, kernel_size=1),nn.ReLU()) + self.backend = make_layers(self.backend_feat, in_channels=1024, dilation=True) + self.convDU = convDU(in_out_channels=64, kernel_size=(1, 9)) + self.convLR = convLR(in_out_channels=64, kernel_size=(9, 1)) + self.output_layer = nn.Sequential(nn.Conv2d(64, 1, kernel_size=1), nn.ReLU()) initialize_weights(self.modules()) @@ -34,13 +33,10 @@ def __init__(self, pretrained=True): self.frontend = nn.Sequential( res.conv1, res.bn1, res.relu, res.maxpool, res.layer1, res.layer2 ) - self.own_reslayer_3 = make_res_layer(Bottleneck, 256, 23, stride=1) + self.own_reslayer_3 = make_res_layer(Bottleneck, 256, 23, stride=1) self.own_reslayer_3.load_state_dict(res.layer3.state_dict()) - - - - def forward(self,x): + def forward(self, x): x = self.frontend(x) x = self.own_reslayer_3(x) @@ -51,37 +47,44 @@ def forward(self,x): x = self.convLR(x) x = self.output_layer(x) - x = F.upsample(x,scale_factor=8) + x = F.upsample(x, scale_factor=8) return x - - -def make_layers(cfg, in_channels = 3,batch_norm=False,dilation = False): + + +def make_layers(cfg, in_channels=3, batch_norm=False, dilation=False): if dilation: d_rate = 2 else: d_rate = 1 layers = [] for v in cfg: - if v == 'M': + if v == "M": layers += [nn.MaxPool2d(kernel_size=2, stride=2)] else: - conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=d_rate,dilation = d_rate) + conv2d = nn.Conv2d( + in_channels, v, kernel_size=3, padding=d_rate, dilation=d_rate + ) if batch_norm: layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] else: layers += [conv2d, nn.ReLU(inplace=True)] in_channels = v - return nn.Sequential(*layers) + return nn.Sequential(*layers) def make_res_layer(block, planes, blocks, stride=1): downsample = None - inplanes=512 + inplanes = 512 if stride != 1 or inplanes != planes * block.expansion: downsample = 
nn.Sequential( - nn.Conv2d(inplanes, planes * block.expansion, - kernel_size=1, stride=stride, bias=False), + nn.Conv2d( + inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), nn.BatchNorm2d(planes * block.expansion), ) @@ -91,7 +94,7 @@ def make_res_layer(block, planes, blocks, stride=1): for i in range(1, blocks): layers.append(block(inplanes, planes)) - return nn.Sequential(*layers) + return nn.Sequential(*layers) class Bottleneck(nn.Module): @@ -101,10 +104,13 @@ def __init__(self, inplanes, planes, stride=1, downsample=None): super(Bottleneck, self).__init__() self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) - self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, - padding=1, bias=False) + self.conv2 = nn.Conv2d( + planes, planes, kernel_size=3, stride=stride, padding=1, bias=False + ) self.bn2 = nn.BatchNorm2d(planes) - self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False) + self.conv3 = nn.Conv2d( + planes, planes * self.expansion, kernel_size=1, bias=False + ) self.bn3 = nn.BatchNorm2d(planes * self.expansion) self.relu = nn.ReLU(inplace=True) self.downsample = downsample @@ -130,4 +136,4 @@ def forward(self, x): out += residual out = self.relu(out) - return out \ No newline at end of file + return out diff --git a/models/SCC_Model/Res50.py b/models/SCC_Model/Res50.py index 11461d5..a2c6134 100644 --- a/models/SCC_Model/Res50.py +++ b/models/SCC_Model/Res50.py @@ -11,12 +11,15 @@ # model_path = '../PyTorch_Pretrained/resnet50-19c8e357.pth' + class Res50(nn.Module): - def __init__(self, pretrained=True): + def __init__(self, pretrained=True): super(Res50, self).__init__() - self.de_pred = nn.Sequential(Conv2d(1024, 128, 1, same_padding=True, NL='relu'), - Conv2d(128, 1, 1, same_padding=True, NL='relu')) + self.de_pred = nn.Sequential( + Conv2d(1024, 128, 1, same_padding=True, NL="relu"), + Conv2d(128, 1, 1, same_padding=True, 
NL="relu"), + ) initialize_weights(self.modules()) @@ -26,22 +29,18 @@ def __init__(self, pretrained=True): self.frontend = nn.Sequential( res.conv1, res.bn1, res.relu, res.maxpool, res.layer1, res.layer2 ) - self.own_reslayer_3 = make_res_layer(Bottleneck, 256, 6, stride=1) + self.own_reslayer_3 = make_res_layer(Bottleneck, 256, 6, stride=1) self.own_reslayer_3.load_state_dict(res.layer3.state_dict()) - - - - def forward(self,x): + def forward(self, x): - x = self.frontend(x) x = self.own_reslayer_3(x) x = self.de_pred(x) - x = F.upsample(x,scale_factor=8) + x = F.upsample(x, scale_factor=8) return x def _initialize_weights(self): @@ -52,17 +51,22 @@ def _initialize_weights(self): m.bias.data.fill_(0) elif isinstance(m, nn.BatchNorm2d): m.weight.fill_(1) - m.bias.data.fill_(0) + m.bias.data.fill_(0) def make_res_layer(block, planes, blocks, stride=1): downsample = None - inplanes=512 + inplanes = 512 if stride != 1 or inplanes != planes * block.expansion: downsample = nn.Sequential( - nn.Conv2d(inplanes, planes * block.expansion, - kernel_size=1, stride=stride, bias=False), + nn.Conv2d( + inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), nn.BatchNorm2d(planes * block.expansion), ) @@ -72,7 +76,7 @@ def make_res_layer(block, planes, blocks, stride=1): for i in range(1, blocks): layers.append(block(inplanes, planes)) - return nn.Sequential(*layers) + return nn.Sequential(*layers) class Bottleneck(nn.Module): @@ -82,10 +86,13 @@ def __init__(self, inplanes, planes, stride=1, downsample=None): super(Bottleneck, self).__init__() self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) self.bn1 = nn.BatchNorm2d(planes) - self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, - padding=1, bias=False) + self.conv2 = nn.Conv2d( + planes, planes, kernel_size=3, stride=stride, padding=1, bias=False + ) self.bn2 = nn.BatchNorm2d(planes) - self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, 
bias=False) + self.conv3 = nn.Conv2d( + planes, planes * self.expansion, kernel_size=1, bias=False + ) self.bn3 = nn.BatchNorm2d(planes * self.expansion) self.relu = nn.ReLU(inplace=True) self.downsample = downsample @@ -111,4 +118,4 @@ def forward(self, x): out += residual out = self.relu(out) - return out \ No newline at end of file + return out diff --git a/models/SCC_Model/VGG.py b/models/SCC_Model/VGG.py index 70abcb6..ec7c0d5 100644 --- a/models/SCC_Model/VGG.py +++ b/models/SCC_Model/VGG.py @@ -8,6 +8,7 @@ # model_path = '../PyTorch_Pretrained/vgg16-397923af.pth' + class VGG(nn.Module): def __init__(self, pretrained=True): super(VGG, self).__init__() @@ -17,16 +18,15 @@ def __init__(self, pretrained=True): features = list(vgg.features.children()) self.features4 = nn.Sequential(*features[0:23]) - - self.de_pred = nn.Sequential(Conv2d(512, 128, 1, same_padding=True, NL='relu'), - Conv2d(128, 1, 1, same_padding=True, NL='relu')) - - + self.de_pred = nn.Sequential( + Conv2d(512, 128, 1, same_padding=True, NL="relu"), + Conv2d(128, 1, 1, same_padding=True, NL="relu"), + ) def forward(self, x): - x = self.features4(x) + x = self.features4(x) x = self.de_pred(x) - x = F.upsample(x,scale_factor=8) + x = F.upsample(x, scale_factor=8) - return x \ No newline at end of file + return x diff --git a/models/SCC_Model/VGG_decoder.py b/models/SCC_Model/VGG_decoder.py index 44b724c..0c5f9d7 100644 --- a/models/SCC_Model/VGG_decoder.py +++ b/models/SCC_Model/VGG_decoder.py @@ -8,6 +8,7 @@ # model_path = '../PyTorch_Pretrained/vgg16-397923af.pth' + class VGG_decoder(nn.Module): def __init__(self, pretrained=True): super(VGG_decoder, self).__init__() @@ -17,19 +18,25 @@ def __init__(self, pretrained=True): features = list(vgg.features.children()) self.features4 = nn.Sequential(*features[0:23]) - - self.de_pred = nn.Sequential(Conv2d( 512, 128, 3, same_padding=True, NL='relu'), - nn.ConvTranspose2d(128,64,4,stride=2,padding=1,output_padding=0,bias=True), - nn.ReLU(), - 
nn.ConvTranspose2d(64,32,4,stride=2,padding=1,output_padding=0,bias=True), - nn.ReLU(), - nn.ConvTranspose2d(32,16,4,stride=2,padding=1,output_padding=0,bias=True), - nn.ReLU(), - Conv2d(16, 1, 1, same_padding=True, NL='relu')) - + self.de_pred = nn.Sequential( + Conv2d(512, 128, 3, same_padding=True, NL="relu"), + nn.ConvTranspose2d( + 128, 64, 4, stride=2, padding=1, output_padding=0, bias=True + ), + nn.ReLU(), + nn.ConvTranspose2d( + 64, 32, 4, stride=2, padding=1, output_padding=0, bias=True + ), + nn.ReLU(), + nn.ConvTranspose2d( + 32, 16, 4, stride=2, padding=1, output_padding=0, bias=True + ), + nn.ReLU(), + Conv2d(16, 1, 1, same_padding=True, NL="relu"), + ) def forward(self, x): - x = self.features4(x) + x = self.features4(x) x = self.de_pred(x) - return x \ No newline at end of file + return x diff --git a/models/vgg.py b/models/vgg.py new file mode 100644 index 0000000..e5ba5e0 --- /dev/null +++ b/models/vgg.py @@ -0,0 +1,56 @@ +import torch.nn as nn +import torch.utils.model_zoo as model_zoo +import torch +from torch.nn import functional as F + +__all__ = ['vgg19'] +model_urls = { + 'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth', +} + +class VGG(nn.Module): + def __init__(self, features): + super(VGG, self).__init__() + self.features = features + self.reg_layer = nn.Sequential( + nn.Conv2d(512, 256, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(256, 128, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(128, 1, 1) + ) + + def forward(self, x): + x = self.features(x) + x = F.upsample_bilinear(x, scale_factor=2) + x = self.reg_layer(x) + return torch.abs(x) + + +def make_layers(cfg, batch_norm=False): + layers = [] + in_channels = 3 + for v in cfg: + if v == 'M': + layers += [nn.MaxPool2d(kernel_size=2, stride=2)] + else: + conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1) + if batch_norm: + layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] + else: + layers += [conv2d, 
nn.ReLU(inplace=True)] + in_channels = v + return nn.Sequential(*layers) + +cfg = { + 'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512] +} + +def vgg19(): + """VGG 19-layer model (configuration "E") + model pre-trained on ImageNet + """ + model = VGG(make_layers(cfg['E'])) + model.load_state_dict(model_zoo.load_url(model_urls['vgg19']), strict=False) + return model + diff --git a/preprocess_dataset.py b/preprocess_dataset.py new file mode 100644 index 0000000..add9478 --- /dev/null +++ b/preprocess_dataset.py @@ -0,0 +1,105 @@ +from scipy.io import loadmat +from PIL import Image +import numpy as np +import os +from glob import glob +import cv2 +import argparse + + +def cal_new_size(im_h, im_w, min_size, max_size): + if im_h < im_w: + if im_h < min_size: + ratio = 1.0 * min_size / im_h + im_h = min_size + im_w = round(im_w*ratio) + elif im_h > max_size: + ratio = 1.0 * max_size / im_h + im_h = max_size + im_w = round(im_w*ratio) + else: + ratio = 1.0 + else: + if im_w < min_size: + ratio = 1.0 * min_size / im_w + im_w = min_size + im_h = round(im_h*ratio) + elif im_w > max_size: + ratio = 1.0 * max_size / im_w + im_w = max_size + im_h = round(im_h*ratio) + else: + ratio = 1.0 + return im_h, im_w, ratio + + +def find_dis(point): + square = np.sum(point*point, axis=1) + dis = np.sqrt(np.maximum(square[:, None] - 2*np.matmul(point, point.T) + square[None, :], 0.0)) + dis = np.mean(np.partition(dis, 3, axis=1)[:, 1:4], axis=1, keepdims=True) + return dis + +def generate_data(im_path): + im = Image.open(im_path) + im_w, im_h = im.size + mat_path = im_path.replace('.jpg', '_ann.mat') + points = loadmat(mat_path)['annPoints'].astype(np.float32) + idx_mask = (points[:, 0] >= 0) * (points[:, 0] <= im_w) * (points[:, 1] >= 0) * (points[:, 1] <= im_h) + points = points[idx_mask] + im_h, im_w, rr = cal_new_size(im_h, im_w, min_size, max_size) + im = np.array(im) + if rr != 1.0: + im = cv2.resize(np.array(im), (im_w, im_h), interpolation=
cv2.INTER_CUBIC) + points = points * rr + return Image.fromarray(im), points + + +def parse_args(): + parser = argparse.ArgumentParser(description='Test ') + parser.add_argument('--origin-dir', default='/home/teddy/UCF-QNRF_ECCV18', + help='original data directory') + parser.add_argument('--data-dir', default='/home/teddy/UCF-Train-Val-Test', + help='processed data directory') + args = parser.parse_args() + return args + +if __name__ == '__main__': + args = parse_args() + save_dir = args.data_dir + min_size = 512 + max_size = 2048 + + for phase in ['Train', 'Test']: + sub_dir = os.path.join(args.origin_dir, phase) + if phase == 'Train': + sub_phase_list = ['train', 'val'] + for sub_phase in sub_phase_list: + sub_save_dir = os.path.join(save_dir, sub_phase) + if not os.path.exists(sub_save_dir): + os.makedirs(sub_save_dir) + with open('{}.txt'.format(sub_phase)) as f: + for i in f: + im_path = os.path.join(sub_dir, i.strip()) + name = os.path.basename(im_path) + print(name) + im, points = generate_data(im_path) + if sub_phase == 'train': + dis = find_dis(points) + points = np.concatenate((points, dis), axis=1) + im_save_path = os.path.join(sub_save_dir, name) + im.save(im_save_path) + gd_save_path = im_save_path.replace('jpg', 'npy') + np.save(gd_save_path, points) + else: + sub_save_dir = os.path.join(save_dir, 'test') + if not os.path.exists(sub_save_dir): + os.makedirs(sub_save_dir) + im_list = glob(os.path.join(sub_dir, '*jpg')) + for im_path in im_list: + name = os.path.basename(im_path) + print(name) + im, points = generate_data(im_path) + im_save_path = os.path.join(sub_save_dir, name) + im.save(im_save_path) + gd_save_path = im_save_path.replace('jpg', 'npy') + np.save(gd_save_path, points) diff --git a/test.py b/test.py index 148a2a5..5c62071 100644 --- a/test.py +++ b/test.py @@ -18,140 +18,150 @@ torch.cuda.set_device(0) torch.backends.cudnn.benchmark = True -exp_name = '../SHHB_results' +exp_name = "../SHHB_results" if not os.path.exists(exp_name): 
os.mkdir(exp_name) -if not os.path.exists(exp_name+'/pred'): - os.mkdir(exp_name+'/pred') - -if not os.path.exists(exp_name+'/gt'): - os.mkdir(exp_name+'/gt') - -mean_std = ([0.452016860247, 0.447249650955, 0.431981861591],[0.23242045939, 0.224925786257, 0.221840232611]) -img_transform = standard_transforms.Compose([ - standard_transforms.ToTensor(), - standard_transforms.Normalize(*mean_std) - ]) -restore = standard_transforms.Compose([ - own_transforms.DeNormalize(*mean_std), - standard_transforms.ToPILImage() - ]) +if not os.path.exists(exp_name + "/pred"): + os.mkdir(exp_name + "/pred") + +if not os.path.exists(exp_name + "/gt"): + os.mkdir(exp_name + "/gt") + +mean_std = ( + [0.452016860247, 0.447249650955, 0.431981861591], + [0.23242045939, 0.224925786257, 0.221840232611], +) +img_transform = standard_transforms.Compose( + [standard_transforms.ToTensor(), standard_transforms.Normalize(*mean_std)] +) +restore = standard_transforms.Compose( + [own_transforms.DeNormalize(*mean_std), standard_transforms.ToPILImage()] +) pil_to_tensor = standard_transforms.ToTensor() -dataRoot = '../ProcessedData/shanghaitech_part_B/test' +dataRoot = "../ProcessedData/shanghaitech_part_B/test" + +model_path = "../08-SANet_all_ep_57_mae_42.4_mse_85.4.pth" -model_path = 'xxx.pth' def main(): - - file_list = [filename for root,dirs,filename in os.walk(dataRoot+'/img/')] + + file_list = [filename for root, dirs, filename in os.walk(dataRoot + "/img/")] test(file_list[0], model_path) - + def test(file_list, model_path): - net = CrowdCounter(cfg.GPU_ID,cfg.NET) - net.load_state_dict(torch.load(model_path)) + net = CrowdCounter(cfg.GPU_ID, cfg.NET) + + state_dict = torch.load(model_path) + state_dict = {k.replace("module.", ""):v for k,v in state_dict.items()} + net.load_state_dict(state_dict) net.cuda() net.eval() - f1 = plt.figure(1) gts = [] preds = [] for filename in file_list: - print( filename ) - imgname = dataRoot + '/img/' + filename - filename_no_ext = filename.split('.')[0] + 
print(filename) + imgname = dataRoot + "/img/" + filename + filename_no_ext = filename.split(".")[0] - denname = dataRoot + '/den/' + filename_no_ext + '.csv' + denname = dataRoot + "/den/" + filename_no_ext + ".csv" - den = pd.read_csv(denname, sep=',',header=None).values + den = pd.read_csv(denname, sep=",", header=None).values den = den.astype(np.float32, copy=False) img = Image.open(imgname) - if img.mode == 'L': - img = img.convert('RGB') - + if img.mode == "L": + img = img.convert("RGB") img = img_transform(img) gt = np.sum(den) with torch.no_grad(): - img = Variable(img[None,:,:,:]).cuda() + img = Variable(img[None, :, :, :]).cuda() pred_map = net.test_forward(img) - sio.savemat(exp_name+'/pred/'+filename_no_ext+'.mat',{'data':pred_map.squeeze().cpu().numpy()/100.}) - sio.savemat(exp_name+'/gt/'+filename_no_ext+'.mat',{'data':den}) + sio.savemat( + exp_name + "/pred/" + filename_no_ext + ".mat", + {"data": pred_map.squeeze().cpu().numpy() / 100.0}, + ) + sio.savemat(exp_name + "/gt/" + filename_no_ext + ".mat", {"data": den}) - pred_map = pred_map.cpu().data.numpy()[0,0,:,:] + pred_map = pred_map.cpu().data.numpy()[0, 0, :, :] + pred = np.sum(pred_map) / 100.0 + pred_map = pred_map / np.max(pred_map + 1e-20) - pred = np.sum(pred_map)/100.0 - pred_map = pred_map/np.max(pred_map+1e-20) - - den = den/np.max(den+1e-20) + den = den / np.max(den + 1e-20) - den_frame = plt.gca() - plt.imshow(den, 'jet') + plt.imshow(den, "jet") den_frame.axes.get_yaxis().set_visible(False) den_frame.axes.get_xaxis().set_visible(False) - den_frame.spines['top'].set_visible(False) - den_frame.spines['bottom'].set_visible(False) - den_frame.spines['left'].set_visible(False) - den_frame.spines['right'].set_visible(False) - plt.savefig(exp_name+'/'+filename_no_ext+'_gt_'+str(int(gt))+'.png',\ - bbox_inches='tight',pad_inches=0,dpi=150) + den_frame.spines["top"].set_visible(False) + den_frame.spines["bottom"].set_visible(False) + den_frame.spines["left"].set_visible(False) + 
den_frame.spines["right"].set_visible(False) + plt.savefig( + exp_name + "/" + filename_no_ext + "_gt_" + str(int(gt)) + ".png", + bbox_inches="tight", + pad_inches=0, + dpi=150, + ) plt.close() - + # sio.savemat(exp_name+'/'+filename_no_ext+'_gt_'+str(int(gt))+'.mat',{'data':den}) pred_frame = plt.gca() - plt.imshow(pred_map, 'jet') + plt.imshow(pred_map, "jet") pred_frame.axes.get_yaxis().set_visible(False) pred_frame.axes.get_xaxis().set_visible(False) - pred_frame.spines['top'].set_visible(False) - pred_frame.spines['bottom'].set_visible(False) - pred_frame.spines['left'].set_visible(False) - pred_frame.spines['right'].set_visible(False) - plt.savefig(exp_name+'/'+filename_no_ext+'_pred_'+str(float(pred))+'.png',\ - bbox_inches='tight',pad_inches=0,dpi=150) + pred_frame.spines["top"].set_visible(False) + pred_frame.spines["bottom"].set_visible(False) + pred_frame.spines["left"].set_visible(False) + pred_frame.spines["right"].set_visible(False) + plt.savefig( + exp_name + "/" + filename_no_ext + "_pred_" + str(float(pred)) + ".png", + bbox_inches="tight", + pad_inches=0, + dpi=150, + ) plt.close() # sio.savemat(exp_name+'/'+filename_no_ext+'_pred_'+str(float(pred))+'.mat',{'data':pred_map}) - diff = den-pred_map + diff = den - pred_map diff_frame = plt.gca() - plt.imshow(diff, 'jet') + plt.imshow(diff, "jet") plt.colorbar() diff_frame.axes.get_yaxis().set_visible(False) diff_frame.axes.get_xaxis().set_visible(False) - diff_frame.spines['top'].set_visible(False) - diff_frame.spines['bottom'].set_visible(False) - diff_frame.spines['left'].set_visible(False) - diff_frame.spines['right'].set_visible(False) - plt.savefig(exp_name+'/'+filename_no_ext+'_diff.png',\ - bbox_inches='tight',pad_inches=0,dpi=150) + diff_frame.spines["top"].set_visible(False) + diff_frame.spines["bottom"].set_visible(False) + diff_frame.spines["left"].set_visible(False) + diff_frame.spines["right"].set_visible(False) + plt.savefig( + exp_name + "/" + filename_no_ext + "_diff.png", + 
bbox_inches="tight", + pad_inches=0, + dpi=150, + ) plt.close() # sio.savemat(exp_name+'/'+filename_no_ext+'_diff.mat',{'data':diff}) - - -if __name__ == '__main__': +if __name__ == "__main__": main() - - - - diff --git a/test_bay.py b/test_bay.py new file mode 100644 index 0000000..1fb5c12 --- /dev/null +++ b/test_bay.py @@ -0,0 +1,48 @@ +import torch +import os +import numpy as np +from datasets.crowd_sh import Crowd +from models.vgg import vgg19 +import argparse + +args = None + + +def parse_args(): + parser = argparse.ArgumentParser(description='Test ') + parser.add_argument('--data-dir', default='/home/teddy/UCF-Train-Val-Test', + help='training data directory') + parser.add_argument('--save-dir', default='/home/teddy/vgg', + help='model directory') + parser.add_argument('--device', default='0', help='assign device') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + os.environ['CUDA_VISIBLE_DEVICES'] = args.device.strip() # set vis gpu + + datasets = Crowd(os.path.join(args.data_dir, 'test'), 512, 8, is_gray=False, method='val') + dataloader = torch.utils.data.DataLoader(datasets, 1, shuffle=False, + num_workers=8, pin_memory=False) + model = vgg19() + device = torch.device('cuda') + model.to(device) + model.load_state_dict(torch.load(os.path.join(args.save_dir, 'best_model.pth'), device)) + epoch_minus = [] + + for inputs, count, name in dataloader: + inputs = inputs.to(device) + assert inputs.size(0) == 1, 'the batch size should equal to 1' + with torch.set_grad_enabled(False): + outputs = model(inputs) + temp_minu = count[0].item() - torch.sum(outputs).item() + print(name, temp_minu, count[0].item(), torch.sum(outputs).item()) + epoch_minus.append(temp_minu) + + epoch_minus = np.array(epoch_minus) + mse = np.sqrt(np.mean(np.square(epoch_minus))) + mae = np.mean(np.abs(epoch_minus)) + log_str = 'Final Test: mae {}, mse {}'.format(mae, mse) + print(log_str) diff --git a/train.py b/train.py index 
9462495..50def36 100644 --- a/train.py +++ b/train.py @@ -1,10 +1,11 @@ import os import numpy as np import torch +import imp from config import cfg -#------------prepare enviroment------------ +# ------------prepare environment------------ seed = cfg.SEED if seed is not None: np.random.seed(seed) @@ -12,52 +13,42 @@ torch.cuda.manual_seed(seed) gpus = cfg.GPU_ID -if len(gpus)==1: +if len(gpus) == 1: torch.cuda.set_device(gpus[0]) torch.backends.cudnn.benchmark = True -#------------prepare data loader------------ +# ------------prepare data loader------------ data_mode = cfg.DATASET -if data_mode is 'SHHA': - from datasets.SHHA.loading_data import loading_data - from datasets.SHHA.setting import cfg_data -elif data_mode is 'SHHB': - from datasets.SHHB.loading_data import loading_data - from datasets.SHHB.setting import cfg_data -elif data_mode is 'QNRF': - from datasets.QNRF.loading_data import loading_data - from datasets.QNRF.setting import cfg_data -elif data_mode is 'UCF50': - from datasets.UCF50.loading_data import loading_data - from datasets.UCF50.setting import cfg_data -elif data_mode is 'WE': - from datasets.WE.loading_data import loading_data - from datasets.WE.setting import cfg_data -elif data_mode is 'GCC': - from datasets.GCC.loading_data import loading_data - from datasets.GCC.setting import cfg_data -elif data_mode is 'Mall': - from datasets.Mall.loading_data import loading_data - from datasets.Mall.setting import cfg_data -elif data_mode is 'UCSD': - from datasets.UCSD.loading_data import loading_data - from datasets.UCSD.setting import cfg_data - - -#------------Prepare Trainer------------ + +dataset_import_path = "datasets/" + data_mode +loading_data = getattr( + imp.load_source("loader", dataset_import_path + "/loading_data.py"), "loading_data" +) +cfg_data = getattr( + imp.load_source("settings", dataset_import_path + "/setting.py"), "cfg_data" +) + +# ------------Prepare Trainer------------ net = cfg.NET -if net in ['MCNN', 'AlexNet', 'VGG',
'VGG_DECODER', 'Res50', 'Res101', 'CSRNet','Res101_SFCN']: +if net in [ + "MCNN", + "AlexNet", + "VGG", + "VGG_DECODER", + "Res50", + "Res101", + "CSRNet", + "Res101_SFCN", +]: from trainer import Trainer -elif net in ['SANet']: - from trainer_for_M2TCC import Trainer # double losses but signle output -elif net in ['CMTL']: - from trainer_for_CMTL import Trainer # double losses and double outputs -elif net in ['PCCNet']: - from trainer_for_M3T3OCC import Trainer - -#------------Start Training------------ +elif net in ["SANet"]: + from trainer_for_M2TCC import Trainer # double losses but single output +elif net in ["CMTL"]: + from trainer_for_CMTL import Trainer # double losses and double outputs + +# ------------Start Training------------ pwd = os.path.split(os.path.realpath(__file__))[0] -cc_trainer = Trainer(loading_data,cfg_data,pwd) +cc_trainer = Trainer(loading_data, cfg_data, pwd) cc_trainer.forward() diff --git a/train_bay.py b/train_bay.py new file mode 100644 index 0000000..90736de --- /dev/null +++ b/train_bay.py @@ -0,0 +1,59 @@ +from utils.regression_trainer import RegTrainer +import argparse +import os +import torch +args = None + +def parse_args(): + parser = argparse.ArgumentParser(description='Train ') + parser.add_argument('--data-dir', default='/home/teddy/UCF-Train-Val-Test', + help='training data directory') + parser.add_argument('--save-dir', default='/home/teddy/vgg', + help='directory to save models.') + + parser.add_argument('--lr', type=float, default=1e-5, + help='the initial learning rate') + parser.add_argument('--weight-decay', type=float, default=1e-4, + help='the weight decay') + parser.add_argument('--resume', default='', + help='the path of resume training model') + parser.add_argument('--max-model-num', type=int, default=1, + help='max models num to save ') + parser.add_argument('--max-epoch', type=int, default=1000, + help='max training epoch') + parser.add_argument('--val-epoch', type=int, default=5, + help='the num of steps to 
log training information') + parser.add_argument('--val-start', type=int, default=600, + help='the epoch start to val') + + parser.add_argument('--batch-size', type=int, default=1, + help='train batch size') + parser.add_argument('--device', default='0', help='assign device') + parser.add_argument('--num-workers', type=int, default=8, + help='the num of training process') + + parser.add_argument('--is-gray', type=bool, default=False, + help='whether the input image is gray') + parser.add_argument('--crop-size', type=int, default=512, + help='the crop size of the train image') + parser.add_argument('--downsample-ratio', type=int, default=8, + help='downsample ratio') + + parser.add_argument('--use-background', type=bool, default=True, + help='whether to use background modelling') + parser.add_argument('--sigma', type=float, default=8.0, + help='sigma for likelihood') + parser.add_argument('--background-ratio', type=float, default=1.0, + help='background ratio') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + print(args.batch_size) + torch.backends.cudnn.benchmark = True + os.environ['CUDA_VISIBLE_DEVICES'] = args.device.strip() # set vis gpu + trainer = RegTrainer(args) + trainer.setup() + trainer.train() diff --git a/trainer.py b/trainer.py index ef2d3e1..151a37a 100644 --- a/trainer.py +++ b/trainer.py @@ -6,85 +6,95 @@ from torch.optim.lr_scheduler import StepLR from models.CC import CrowdCounter -from config import cfg +from config import cfg as default_cfg from misc.utils import * import pdb -class Trainer(): - def __init__(self, dataloader, cfg_data, pwd): +class Trainer: + def __init__(self, dataloader, cfg_data, pwd, cfg=None): self.cfg_data = cfg_data - - self.data_mode = cfg.DATASET - self.exp_name = cfg.EXP_NAME - self.exp_path = cfg.EXP_PATH + if cfg is None: + self.cfg = default_cfg + else: + self.cfg = cfg + + self.data_mode = self.cfg.DATASET + self.exp_name = self.cfg.EXP_NAME + self.exp_path = 
self.cfg.EXP_PATH self.pwd = pwd - self.net_name = cfg.NET - self.net = CrowdCounter(cfg.GPU_ID,self.net_name).cuda() - self.optimizer = optim.Adam(self.net.CCN.parameters(), lr=cfg.LR, weight_decay=1e-4) - # self.optimizer = optim.SGD(self.net.parameters(), cfg.LR, momentum=0.95,weight_decay=5e-4) - self.scheduler = StepLR(self.optimizer, step_size=cfg.NUM_EPOCH_LR_DECAY, gamma=cfg.LR_DECAY) - - self.train_record = {'best_mae': 1e20, 'best_mse':1e20, 'best_model_name': ''} - self.timer = {'iter time' : Timer(),'train time' : Timer(),'val time' : Timer()} + self.net_name = self.cfg.NET + self.net = CrowdCounter(self.cfg.GPU_ID, self.net_name).cuda() + self.optimizer = optim.Adam( + self.net.CCN.parameters(), lr=self.cfg.LR, weight_decay=1e-4 + ) + # self.optimizer = optim.SGD(self.net.parameters(), self.cfg.LR, momentum=0.95,weight_decay=5e-4) + self.scheduler = StepLR( + self.optimizer, + step_size=self.cfg.NUM_EPOCH_LR_DECAY, + gamma=self.cfg.LR_DECAY, + ) + + self.train_record = {"best_mae": 1e20, "best_mse": 1e20, "best_model_name": ""} + self.timer = {"iter time": Timer(), "train time": Timer(), "val time": Timer()} self.epoch = 0 self.i_tb = 0 - - if cfg.PRE_GCC: - self.net.load_state_dict(torch.load(cfg.PRE_GCC_MODEL)) - self.train_loader, self.val_loader, self.restore_transform = dataloader() + if self.cfg.PRE_GCC: + self.net.load_state_dict(torch.load(self.cfg.PRE_GCC_MODEL)) - if cfg.RESUME: - latest_state = torch.load(cfg.RESUME_PATH) - self.net.load_state_dict(latest_state['net']) - self.optimizer.load_state_dict(latest_state['optimizer']) - self.scheduler.load_state_dict(latest_state['scheduler']) - self.epoch = latest_state['epoch'] + 1 - self.i_tb = latest_state['i_tb'] - self.train_record = latest_state['train_record'] - self.exp_path = latest_state['exp_path'] - self.exp_name = latest_state['exp_name'] + self.train_loader, self.val_loader, self.restore_transform = dataloader(cfg_data=cfg_data) - self.writer, self.log_txt = logger(self.exp_path, 
self.exp_name, self.pwd, 'exp', resume=cfg.RESUME) + if self.cfg.RESUME: + latest_state = torch.load(self.cfg.RESUME_PATH) + self.net.load_state_dict(latest_state["net"]) + self.optimizer.load_state_dict(latest_state["optimizer"]) + self.scheduler.load_state_dict(latest_state["scheduler"]) + self.epoch = latest_state["epoch"] + 1 + self.i_tb = latest_state["i_tb"] + self.train_record = latest_state["train_record"] + self.exp_path = latest_state["exp_path"] + self.exp_name = latest_state["exp_name"] + self.writer, self.log_txt = logger( + self.exp_path, self.exp_name, self.pwd, "exp", resume=self.cfg.RESUME + ) def forward(self): # self.validate_V3() - for epoch in range(self.epoch,cfg.MAX_EPOCH): + for epoch in range(self.epoch, self.cfg.MAX_EPOCH): self.epoch = epoch - if epoch > cfg.LR_DECAY_START: + if epoch > self.cfg.LR_DECAY_START: self.scheduler.step() - - # training - self.timer['train time'].tic() + + # training + self.timer["train time"].tic() self.train() - self.timer['train time'].toc(average=False) + self.timer["train time"].toc(average=False) - print( 'train time: {:.2f}s'.format(self.timer['train time'].diff) ) - print( '='*20 ) + print("train time: {:.2f}s".format(self.timer["train time"].diff)) + print("=" * 20) # validation - if epoch%cfg.VAL_FREQ==0 or epoch>cfg.VAL_DENSE_START: - self.timer['val time'].tic() - if self.data_mode in ['SHHA', 'SHHB', 'QNRF', 'UCF50']: + if epoch % self.cfg.VAL_FREQ == 0 or epoch > self.cfg.VAL_DENSE_START: + self.timer["val time"].tic() + if self.data_mode in ["SHHA", "SHHB", "QNRF", "UCF50"]: self.validate_V1() - elif self.data_mode is 'WE': + elif self.data_mode == "WE": self.validate_V2() - elif self.data_mode is 'GCC': + elif self.data_mode == "GCC": self.validate_V3() - self.timer['val time'].toc(average=False) - print( 'val time: {:.2f}s'.format(self.timer['val time'].diff) ) - + self.timer["val time"].toc(average=False) + print("val time: {:.2f}s".format(self.timer["val time"].diff)) - def train(self): # 
training for all datasets + def train(self): # training for all datasets self.net.train() for i, data in enumerate(self.train_loader, 0): - self.timer['iter time'].tic() + self.timer["iter time"].tic() img, gt_map = data img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() @@ -95,19 +105,32 @@ def train(self): # training for all datasets loss.backward() self.optimizer.step() - if (i + 1) % cfg.PRINT_FREQ == 0: + if (i + 1) % self.cfg.PRINT_FREQ == 0: self.i_tb += 1 - self.writer.add_scalar('train_loss', loss.item(), self.i_tb) - self.timer['iter time'].toc(average=False) - print( '[ep %d][it %d][loss %.4f][lr %.4f][%.2fs]' % \ - (self.epoch + 1, i + 1, loss.item(), self.optimizer.param_groups[0]['lr']*10000, self.timer['iter time'].diff) ) - print( ' [cnt: gt: %.1f pred: %.2f]' % (gt_map[0].sum().data/self.cfg_data.LOG_PARA, pred_map[0].sum().data/self.cfg_data.LOG_PARA) ) - - - def validate_V1(self):# validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 + self.writer.add_scalar("train_loss", loss.item(), self.i_tb) + self.timer["iter time"].toc(average=False) + print( + "[ep %d][it %d][loss %.4f][lr %.4f][%.2fs]" + % ( + self.epoch + 1, + i + 1, + loss.item(), + self.optimizer.param_groups[0]["lr"] * 10000, + self.timer["iter time"].diff, + ) + ) + print( + " [cnt: gt: %.1f pred: %.2f]" + % ( + gt_map[0].sum().data / self.cfg_data.LOG_PARA, + pred_map[0].sum().data / self.cfg_data.LOG_PARA, + ) + ) + + def validate_V1(self): # validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 self.net.eval() - + losses = AverageMeter() maes = AverageMeter() mses = AverageMeter() @@ -119,37 +142,53 @@ def validate_V1(self):# validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() - pred_map = self.net.forward(img,gt_map) + pred_map = self.net.forward(img, gt_map) pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() for i_img in range(pred_map.shape[0]): - - pred_cnt = np.sum(pred_map[i_img])/self.cfg_data.LOG_PARA - 
gt_count = np.sum(gt_map[i_img])/self.cfg_data.LOG_PARA - + pred_cnt = np.sum(pred_map[i_img]) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map[i_img]) / self.cfg_data.LOG_PARA + losses.update(self.net.loss.item()) - maes.update(abs(gt_count-pred_cnt)) - mses.update((gt_count-pred_cnt)*(gt_count-pred_cnt)) - if vi==0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) - + maes.update(abs(gt_count - pred_cnt)) + mses.update((gt_count - pred_cnt) * (gt_count - pred_cnt)) + if vi == 0: + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) + mae = maes.avg mse = np.sqrt(mses.avg) loss = losses.avg - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mse', mse, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, mse, loss],self.train_record,self.log_txt) - print_summary(self.exp_name,[mae, mse, loss],self.train_record) - - - def validate_V2(self):# validate_V2 for WE + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mse", mse, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, mse, loss], + self.train_record, + self.log_txt, + ) + print_summary(self.exp_name, [mae, mse, loss], self.train_record) + + def validate_V2(self): # validate_V2 for WE self.net.eval() @@ -157,13 +196,18 @@ def validate_V2(self):# validate_V2 for WE maes = AverageCategoryMeter(5) roi_mask = [] - from datasets.WE.setting import cfg_data + from datasets.WE.setting import cfg_data from scipy import io as sio + for val_folder in cfg_data.VAL_FOLDER: - 
roi_mask.append(sio.loadmat(os.path.join(cfg_data.DATA_PATH,'test',val_folder + '_roi.mat'))['BW']) - - for i_sub,i_loader in enumerate(self.val_loader,0): + roi_mask.append( + sio.loadmat( + os.path.join(cfg_data.DATA_PATH, "test", val_folder + "_roi.mat") + )["BW"] + ) + + for i_sub, i_loader in enumerate(self.val_loader, 0): mask = roi_mask[i_sub] for vi, data in enumerate(i_loader, 0): @@ -173,51 +217,74 @@ def validate_V2(self):# validate_V2 for WE img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() - pred_map = self.net.forward(img,gt_map) + pred_map = self.net.forward(img, gt_map) pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() for i_img in range(pred_map.shape[0]): - - pred_cnt = np.sum(pred_map[i_img])/self.cfg_data.LOG_PARA - gt_count = np.sum(gt_map[i_img])/self.cfg_data.LOG_PARA - - losses.update(self.net.loss.item(),i_sub) - maes.update(abs(gt_count-pred_cnt),i_sub) - if vi==0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) - - mae = np.average(maes.avg) - loss = np.average(losses.avg) - - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mae_s1', maes.avg[0], self.epoch + 1) - self.writer.add_scalar('mae_s2', maes.avg[1], self.epoch + 1) - self.writer.add_scalar('mae_s3', maes.avg[2], self.epoch + 1) - self.writer.add_scalar('mae_s4', maes.avg[3], self.epoch + 1) - self.writer.add_scalar('mae_s5', maes.avg[4], self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, 0, loss],self.train_record,self.log_txt) - print_WE_summary(self.log_txt,self.epoch,[mae, 0, loss],self.train_record,maes) - - + pred_cnt = np.sum(pred_map[i_img]) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map[i_img]) / self.cfg_data.LOG_PARA + + losses.update(self.net.loss.item(), i_sub) + maes.update(abs(gt_count - 
pred_cnt), i_sub) + if vi == 0: + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) + mae = np.average(maes.avg) + loss = np.average(losses.avg) - def validate_V3(self):# validate_V3 for GCC + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mae_s1", maes.avg[0], self.epoch + 1) + self.writer.add_scalar("mae_s2", maes.avg[1], self.epoch + 1) + self.writer.add_scalar("mae_s3", maes.avg[2], self.epoch + 1) + self.writer.add_scalar("mae_s4", maes.avg[3], self.epoch + 1) + self.writer.add_scalar("mae_s5", maes.avg[4], self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, 0, loss], + self.train_record, + self.log_txt, + ) + print_WE_summary( + self.log_txt, self.epoch, [mae, 0, loss], self.train_record, maes + ) + + def validate_V3(self): # validate_V3 for GCC self.net.eval() - + losses = AverageMeter() maes = AverageMeter() mses = AverageMeter() - c_maes = {'level':AverageCategoryMeter(9), 'time':AverageCategoryMeter(8),'weather':AverageCategoryMeter(7)} - c_mses = {'level':AverageCategoryMeter(9), 'time':AverageCategoryMeter(8),'weather':AverageCategoryMeter(7)} - + c_maes = { + "level": AverageCategoryMeter(9), + "time": AverageCategoryMeter(8), + "weather": AverageCategoryMeter(7), + } + c_mses = { + "level": AverageCategoryMeter(9), + "time": AverageCategoryMeter(8), + "weather": AverageCategoryMeter(7), + } for vi, data in enumerate(self.val_loader, 0): img, gt_map, attributes_pt = data @@ -226,46 +293,67 @@ def validate_V3(self):# validate_V3 for GCC img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() - - pred_map = self.net.forward(img,gt_map) + pred_map = self.net.forward(img, gt_map) pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() for i_img in 
range(pred_map.shape[0]): - - pred_cnt = np.sum(pred_map[i_img])/self.cfg_data.LOG_PARA - gt_count = np.sum(gt_map[i_img])/self.cfg_data.LOG_PARA - s_mae = abs(gt_count-pred_cnt) - s_mse = (gt_count-pred_cnt)*(gt_count-pred_cnt) + pred_cnt = np.sum(pred_map[i_img]) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map[i_img]) / self.cfg_data.LOG_PARA + + s_mae = abs(gt_count - pred_cnt) + s_mse = (gt_count - pred_cnt) * (gt_count - pred_cnt) losses.update(self.net.loss.item()) maes.update(s_mae) - mses.update(s_mse) - attributes_pt = attributes_pt.squeeze() - c_maes['level'].update(s_mae,attributes_pt[i_img][0]) - c_mses['level'].update(s_mse,attributes_pt[i_img][0]) - c_maes['time'].update(s_mae,attributes_pt[i_img][1]/3) - c_mses['time'].update(s_mse,attributes_pt[i_img][1]/3) - c_maes['weather'].update(s_mae,attributes_pt[i_img][2]) - c_mses['weather'].update(s_mse,attributes_pt[i_img][2]) - - - if vi==0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) - + mses.update(s_mse) + attributes_pt = attributes_pt.squeeze() + c_maes["level"].update(s_mae, attributes_pt[i_img][0]) + c_mses["level"].update(s_mse, attributes_pt[i_img][0]) + c_maes["time"].update(s_mae, attributes_pt[i_img][1] / 3) + c_mses["time"].update(s_mse, attributes_pt[i_img][1] / 3) + c_maes["weather"].update(s_mae, attributes_pt[i_img][2]) + c_mses["weather"].update(s_mse, attributes_pt[i_img][2]) + + if vi == 0: + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) + loss = losses.avg mae = maes.avg mse = np.sqrt(mses.avg) - - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mse', mse, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, mse, loss],self.train_record,self.log_txt) - - - 
print_GCC_summary(self.log_txt,self.epoch,[mae, mse, loss],self.train_record,c_maes,c_mses) + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mse", mse, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, mse, loss], + self.train_record, + self.log_txt, + ) + + print_GCC_summary( + self.log_txt, + self.epoch, + [mae, mse, loss], + self.train_record, + c_maes, + c_mses, + ) diff --git a/trainer_for_CMTL.py b/trainer_for_CMTL.py index fb9ada4..7917363 100644 --- a/trainer_for_CMTL.py +++ b/trainer_for_CMTL.py @@ -6,72 +6,81 @@ from torch.optim.lr_scheduler import StepLR from models.M2T2OCC import CrowdCounter -from config import cfg +from config import cfg as default_cfg from misc.utils import * import pdb - - -class Trainer(): - def __init__(self, dataloader, cfg_data, pwd): +class Trainer: + def __init__(self, dataloader, cfg_data, pwd, cfg=None): self.cfg_data = cfg_data - - self.data_mode = cfg.DATASET - self.exp_name = cfg.EXP_NAME - self.exp_path = cfg.EXP_PATH + if cfg is None: + self.cfg = default_cfg + else: + self.cfg = cfg + + self.data_mode = self.cfg.DATASET + self.exp_name = self.cfg.EXP_NAME + self.exp_path = self.cfg.EXP_PATH self.pwd = pwd - self.net_name = cfg.NET + self.net_name = self.cfg.NET - self.train_loader, self.val_loader, self.restore_transform = dataloader() + self.train_loader, self.val_loader, self.restore_transform = dataloader(cfg_data=cfg_data) - - if self.net_name in ['CMTL']: + if self.net_name in ["CMTL"]: # use for gt's class labeling - self.max_gt_count = 0. - self.min_gt_count = 0x7f7f7f + self.max_gt_count = 0.0 + self.min_gt_count = 0x7F7F7F self.num_classes = 10 - self.bin_val = 0. 
+ self.bin_val = 0.0 self.pre_max_min_bin_val() ce_weights = torch.from_numpy(self.pre_weights()).float() loss_1_fn = nn.MSELoss() - + loss_2_fn = nn.BCELoss(weight=ce_weights) - self.net = CrowdCounter(cfg.GPU_ID, self.net_name,loss_1_fn,loss_2_fn).cuda() - self.optimizer = optim.Adam(self.net.CCN.parameters(), lr=cfg.LR, weight_decay=1e-4) + self.net = CrowdCounter( + self.cfg.GPU_ID, self.net_name, loss_1_fn, loss_2_fn + ).cuda() + self.optimizer = optim.Adam( + self.net.CCN.parameters(), lr=self.cfg.LR, weight_decay=1e-4 + ) # self.optimizer = optim.SGD(self.net.parameters(), cfg.LR, momentum=0.95,weight_decay=5e-4) - self.scheduler = StepLR(self.optimizer, step_size=cfg.NUM_EPOCH_LR_DECAY, gamma=cfg.LR_DECAY) + self.scheduler = StepLR( + self.optimizer, + step_size=self.cfg.NUM_EPOCH_LR_DECAY, + gamma=self.cfg.LR_DECAY, + ) - self.train_record = {'best_mae': 1e20, 'best_mse': 1e20, 'best_model_name': ''} - self.timer = {'iter time': Timer(), 'train time': Timer(), 'val time': Timer()} + self.train_record = {"best_mae": 1e20, "best_mse": 1e20, "best_model_name": ""} + self.timer = {"iter time": Timer(), "train time": Timer(), "val time": Timer()} self.i_tb = 0 self.epoch = 0 - if cfg.PRE_GCC: - self.net.load_state_dict(torch.load(cfg.PRE_GCC_MODEL)) - - if cfg.RESUME: - latest_state = torch.load(cfg.RESUME_PATH) - self.net.load_state_dict(latest_state['net']) - self.optimizer.load_state_dict(latest_state['optimizer']) - self.scheduler.load_state_dict(latest_state['scheduler']) - self.epoch = latest_state['epoch'] + 1 - self.i_tb = latest_state['i_tb'] - self.train_record = latest_state['train_record'] - self.exp_path = latest_state['exp_path'] - self.exp_name = latest_state['exp_name'] - - self.writer, self.log_txt = logger(self.exp_path, self.exp_name, self.pwd, 'exp', resume=cfg.RESUME) + if self.cfg.PRE_GCC: + self.net.load_state_dict(torch.load(self.cfg.PRE_GCC_MODEL)) + if self.cfg.RESUME: + latest_state = torch.load(self.cfg.RESUME_PATH) + 
self.net.load_state_dict(latest_state["net"]) + self.optimizer.load_state_dict(latest_state["optimizer"]) + self.scheduler.load_state_dict(latest_state["scheduler"]) + self.epoch = latest_state["epoch"] + 1 + self.i_tb = latest_state["i_tb"] + self.train_record = latest_state["train_record"] + self.exp_path = latest_state["exp_path"] + self.exp_name = latest_state["exp_name"] + self.writer, self.log_txt = logger( + self.exp_path, self.exp_name, self.pwd, "exp", resume=self.cfg.RESUME + ) def pre_max_min_bin_val(self): for i, data in enumerate(self.train_loader, 0): @@ -86,8 +95,8 @@ def pre_max_min_bin_val(self): elif temp_count < self.min_gt_count: self.min_gt_count = temp_count - print( '[max_gt: %.2f min_gt: %.2f]' % (self.max_gt_count, self.min_gt_count) ) - self.bin_val = (self.max_gt_count - self.min_gt_count)/float(self.num_classes) + print("[max_gt: %.2f min_gt: %.2f]" % (self.max_gt_count, self.min_gt_count)) + self.bin_val = (self.max_gt_count - self.min_gt_count) / float(self.num_classes) def pre_weights(self): count_class_hist = np.zeros(self.num_classes) @@ -96,18 +105,19 @@ def pre_weights(self): _, gt_map = data for j in range(0, gt_map.size()[0]): temp_count = gt_map[j].sum() / self.cfg_data.LOG_PARA - class_idx = min(int(temp_count/self.bin_val), self.num_classes-1) + class_idx = min( + int(temp_count / self.bin_val), self.num_classes - 1 + ) count_class_hist[class_idx] += 1 wts = count_class_hist - wts = 1-wts/(sum(wts)); - wts = wts/sum(wts); - print( 'pre_wts:' ) - print( wts ) + wts = 1 - wts / (sum(wts)) + wts = wts / sum(wts) + print("pre_wts:") + print(wts) return wts - def online_assign_gt_class_labels(self, gt_map_batch): batch = gt_map_batch.size()[0] # pdb.set_trace() @@ -116,78 +126,91 @@ def online_assign_gt_class_labels(self, gt_map_batch): for i in range(0, batch): # pdb.set_trace() - gt_count = (gt_map_batch[i].sum().item() / self.cfg_data.LOG_PARA) + gt_count = gt_map_batch[i].sum().item() / self.cfg_data.LOG_PARA # generate gt's 
label same as implement of CMTL by Viswa gt_class_label = np.zeros(self.num_classes, dtype=np.int) # bin_val = ((self.max_gt_count - self.min_gt_count)/float(self.num_classes)) - class_idx = min(int(gt_count/self.bin_val), self.num_classes-1) + class_idx = min(int(gt_count / self.bin_val), self.num_classes - 1) gt_class_label[class_idx] = 1 # pdb.set_trace() label[i] = gt_class_label.reshape(1, self.num_classes) - return torch.from_numpy(label).float() def forward(self): # self.validate_V1() - for epoch in range(self.epoch, cfg.MAX_EPOCH): + for epoch in range(self.epoch, self.cfg.MAX_EPOCH): self.epoch = epoch - if epoch > cfg.LR_DECAY_START: + if epoch > self.cfg.LR_DECAY_START: self.scheduler.step() # training - self.timer['train time'].tic() + self.timer["train time"].tic() self.train() - self.timer['train time'].toc(average=False) + self.timer["train time"].toc(average=False) - print( 'train time: {:.2f}s'.format(self.timer['train time'].diff) ) - print( '=' * 20 ) + print("train time: {:.2f}s".format(self.timer["train time"].diff)) + print("=" * 20) # validation - if epoch % cfg.VAL_FREQ == 0 or epoch > cfg.VAL_DENSE_START: - self.timer['val time'].tic() - if self.data_mode in ['SHHA', 'SHHB', 'QNRF', 'UCF50']: + if epoch % self.cfg.VAL_FREQ == 0 or epoch > self.cfg.VAL_DENSE_START: + self.timer["val time"].tic() + if self.data_mode in ["SHHA", "SHHB", "QNRF", "UCF50"]: self.validate_V1() - elif self.data_mode is 'WE': + elif self.data_mode == "WE": self.validate_V2() - elif self.data_mode is 'GCC': + elif self.data_mode == "GCC": self.validate_V3() - self.timer['val time'].toc(average=False) - print( 'val time: {:.2f}s'.format(self.timer['val time'].diff) ) + self.timer["val time"].toc(average=False) + print("val time: {:.2f}s".format(self.timer["val time"].diff)) def train(self): # training for all datasets self.net.train() for i, data in enumerate(self.train_loader, 0): # train net - self.timer['iter time'].tic() + self.timer["iter time"].tic() img, gt_map 
= data img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() gt_label = self.online_assign_gt_class_labels(gt_map) gt_label = Variable(gt_label).cuda() - self.optimizer.zero_grad() pred_map = self.net(img, gt_map, gt_label) - loss1,loss2 = self.net.loss - loss = loss1+loss2 + loss1, loss2 = self.net.loss + loss = loss1 + loss2 # loss = loss1 loss.backward() self.optimizer.step() - if (i + 1) % cfg.PRINT_FREQ == 0: + if (i + 1) % self.cfg.PRINT_FREQ == 0: self.i_tb += 1 - self.writer.add_scalar('train_loss', loss.item(), self.i_tb) - self.writer.add_scalar('train_loss1', loss1.item(), self.i_tb) - self.writer.add_scalar('train_loss2', loss2.item(), self.i_tb) - self.timer['iter time'].toc(average=False) - print( '[ep %d][it %d][loss %.8f, %.8f, %.8f][lr %.4f][%.2fs]' % \ - (self.epoch + 1, i + 1, loss.item(),loss1.item(),loss2.item(), self.optimizer.param_groups[0]['lr'] * 10000, - self.timer['iter time'].diff) ) - print( ' [cnt: gt: %.1f pred: %.2f]' % (gt_map[0].sum().data/self.cfg_data.LOG_PARA, pred_map[0].sum().data/self.cfg_data.LOG_PARA) ) + self.writer.add_scalar("train_loss", loss.item(), self.i_tb) + self.writer.add_scalar("train_loss1", loss1.item(), self.i_tb) + self.writer.add_scalar("train_loss2", loss2.item(), self.i_tb) + self.timer["iter time"].toc(average=False) + print( + "[ep %d][it %d][loss %.8f, %.8f, %.8f][lr %.4f][%.2fs]" + % ( + self.epoch + 1, + i + 1, + loss.item(), + loss1.item(), + loss2.item(), + self.optimizer.param_groups[0]["lr"] * 10000, + self.timer["iter time"].diff, + ) + ) + print( + " [cnt: gt: %.1f pred: %.2f]" + % ( + gt_map[0].sum().data / self.cfg_data.LOG_PARA, + pred_map[0].sum().data / self.cfg_data.LOG_PARA, + ) + ) def validate_V1(self): # validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 @@ -215,25 +238,43 @@ def validate_V1(self): # validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 pred_cnt = np.sum(pred_map) / self.cfg_data.LOG_PARA gt_count = np.sum(gt_map) / self.cfg_data.LOG_PARA - loss1,loss2 = self.net.loss + loss1, 
loss2 = self.net.loss # loss = loss1.item()+loss2.item() loss = loss1.item() losses.update(loss) maes.update(abs(gt_count - pred_cnt)) mses.update((gt_count - pred_cnt) * (gt_count - pred_cnt)) if vi == 0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) mae = maes.avg mse = np.sqrt(mses.avg) loss = losses.avg - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mse', mse, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, mse, loss],self.train_record,self.log_txt) + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mse", mse, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, mse, loss], + self.train_record, + self.log_txt, + ) print_summary(self.exp_name, [mae, mse, loss], self.train_record) @@ -258,26 +299,43 @@ def validate_V2(self): # validate_V2 for WE pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() - for i_img in range(pred_map.shape[0]): - - pred_cnt = np.sum(pred_map[i_img])/self.cfg_data.LOG_PARA - gt_count = np.sum(gt_map[i_img])/self.cfg_data.LOG_PARA - losses.update(self.net.loss.item(),i_sub) - maes.update(abs(gt_count-pred_cnt),i_sub) + pred_cnt = np.sum(pred_map[i_img]) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map[i_img]) / self.cfg_data.LOG_PARA + + losses.update(self.net.loss.item(), i_sub) + maes.update(abs(gt_count - pred_cnt), i_sub) if vi == 0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) + vis_results( 
+ self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) mae = np.average(maes.avg) loss = np.average(losses.avg) - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, 0, loss],self.train_record,self.log_txt) + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, 0, loss], + self.train_record, + self.log_txt, + ) print_summary(self.exp_name, [mae, 0, loss], self.train_record) def validate_V3(self): # validate_V3 for GCC @@ -288,8 +346,16 @@ def validate_V3(self): # validate_V3 for GCC maes = AverageMeter() mses = AverageMeter() - c_maes = {'level': AverageCategoryMeter(9), 'time': AverageCategoryMeter(8), 'weather': AverageCategoryMeter(7)} - c_mses = {'level': AverageCategoryMeter(9), 'time': AverageCategoryMeter(8), 'weather': AverageCategoryMeter(7)} + c_maes = { + "level": AverageCategoryMeter(9), + "time": AverageCategoryMeter(8), + "weather": AverageCategoryMeter(7), + } + c_mses = { + "level": AverageCategoryMeter(9), + "time": AverageCategoryMeter(8), + "weather": AverageCategoryMeter(7), + } for vi, data in enumerate(self.val_loader, 0): img, gt_map, attributes_pt = data @@ -313,32 +379,48 @@ def validate_V3(self): # validate_V3 for GCC losses.update(self.net.loss.item()) maes.update(s_mae) mses.update(s_mse) - c_maes['level'].update(s_mae, attributes_pt[i_img][0]) - c_mses['level'].update(s_mse, attributes_pt[i_img][0]) - c_maes['time'].update(s_mae, attributes_pt[i_img][1] / 3) - c_mses['time'].update(s_mse, attributes_pt[i_img][1] / 3) - c_maes['weather'].update(s_mae, attributes_pt[i_img][2]) - 
c_mses['weather'].update(s_mse, attributes_pt[i_img][2]) + c_maes["level"].update(s_mae, attributes_pt[i_img][0]) + c_mses["level"].update(s_mse, attributes_pt[i_img][0]) + c_maes["time"].update(s_mae, attributes_pt[i_img][1] / 3) + c_mses["time"].update(s_mse, attributes_pt[i_img][1] / 3) + c_maes["weather"].update(s_mae, attributes_pt[i_img][2]) + c_mses["weather"].update(s_mse, attributes_pt[i_img][2]) if vi == 0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) loss = losses.avg mae = maes.avg mse = np.sqrt(mses.avg) - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mse', mse, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, mse, loss],self.train_record,self.log_txt) - - c_mses['level'] = np.sqrt(c_mses['level'].avg) - c_mses['time'] = np.sqrt(c_mses['time'].avg) - c_mses['weather'] = np.sqrt(c_mses['weather'].avg) - print_GCC_summary(self.exp_name, [mae, mse, loss], self.train_record, c_maes, c_mses) - - - - + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mse", mse, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, mse, loss], + self.train_record, + self.log_txt, + ) + + c_mses["level"] = np.sqrt(c_mses["level"].avg) + c_mses["time"] = np.sqrt(c_mses["time"].avg) + c_mses["weather"] = np.sqrt(c_mses["weather"].avg) + print_GCC_summary( + self.exp_name, [mae, mse, loss], self.train_record, c_maes, c_mses + ) diff --git a/trainer_for_M2TCC.py b/trainer_for_M2TCC.py index 2fca3b7..6f8a5fa 100644 --- 
a/trainer_for_M2TCC.py +++ b/trainer_for_M2TCC.py @@ -6,121 +6,143 @@ from torch.optim.lr_scheduler import StepLR from models.M2TCC import CrowdCounter -from config import cfg +from config import cfg as default_cfg from misc.utils import * import pdb -class Trainer(): - def __init__(self, dataloader, cfg_data, pwd): +class Trainer: + def __init__(self, dataloader, cfg_data, pwd, cfg=None): self.cfg_data = cfg_data - - self.data_mode = cfg.DATASET - self.exp_name = cfg.EXP_NAME - self.exp_path = cfg.EXP_PATH + if cfg is None: + self.cfg = default_cfg + else: + self.cfg = cfg + + self.data_mode = self.cfg.DATASET + self.exp_name = self.cfg.EXP_NAME + self.exp_path = self.cfg.EXP_PATH self.pwd = pwd - self.net_name = cfg.NET + self.net_name = self.cfg.NET - if self.net_name in ['SANet']: + if self.net_name in ["SANet"]: loss_1_fn = nn.MSELoss() from misc import pytorch_ssim + loss_2_fn = pytorch_ssim.SSIM(window_size=11) - self.net = CrowdCounter(cfg.GPU_ID,self.net_name,loss_1_fn,loss_2_fn).cuda() - self.optimizer = optim.Adam(self.net.CCN.parameters(), lr=cfg.LR, weight_decay=1e-4) + self.net = CrowdCounter( + self.cfg.GPU_ID, self.net_name, loss_1_fn, loss_2_fn + ).cuda() + self.optimizer = optim.Adam( + self.net.CCN.parameters(), lr=self.cfg.LR, weight_decay=1e-4 + ) # self.optimizer = optim.SGD(self.net.parameters(), cfg.LR, momentum=0.95,weight_decay=5e-4) - self.scheduler = StepLR(self.optimizer, step_size=cfg.NUM_EPOCH_LR_DECAY, gamma=cfg.LR_DECAY) - - self.train_record = {'best_mae': 1e20, 'best_mse':1e20, 'best_model_name': ''} - self.timer = {'iter time' : Timer(),'train time' : Timer(),'val time' : Timer()} + self.scheduler = StepLR( + self.optimizer, + step_size=self.cfg.NUM_EPOCH_LR_DECAY, + gamma=self.cfg.LR_DECAY, + ) + self.train_record = {"best_mae": 1e20, "best_mse": 1e20, "best_model_name": ""} + self.timer = {"iter time": Timer(), "train time": Timer(), "val time": Timer()} self.epoch = 0 self.i_tb = 0 - - if cfg.PRE_GCC: - 
self.net.load_state_dict(torch.load(cfg.PRE_GCC_MODEL)) - self.train_loader, self.val_loader, self.restore_transform = dataloader() + if self.cfg.PRE_GCC: + self.net.load_state_dict(torch.load(self.cfg.PRE_GCC_MODEL)) - if cfg.RESUME: - latest_state = torch.load(cfg.RESUME_PATH) - self.net.load_state_dict(latest_state['net']) - self.optimizer.load_state_dict(latest_state['optimizer']) - self.scheduler.load_state_dict(latest_state['scheduler']) - self.epoch = latest_state['epoch'] + 1 - self.i_tb = latest_state['i_tb'] - self.train_record = latest_state['train_record'] - self.exp_path = latest_state['exp_path'] - self.exp_name = latest_state['exp_name'] + self.train_loader, self.val_loader, self.restore_transform = dataloader(cfg_data=cfg_data) - self.writer, self.log_txt = logger(self.exp_path, self.exp_name, self.pwd, 'exp', resume=cfg.RESUME) + if self.cfg.RESUME: + latest_state = torch.load(self.cfg.RESUME_PATH) + self.net.load_state_dict(latest_state["net"]) + self.optimizer.load_state_dict(latest_state["optimizer"]) + self.scheduler.load_state_dict(latest_state["scheduler"]) + self.epoch = latest_state["epoch"] + 1 + self.i_tb = latest_state["i_tb"] + self.train_record = latest_state["train_record"] + self.exp_path = latest_state["exp_path"] + self.exp_name = latest_state["exp_name"] + self.writer, self.log_txt = logger( + self.exp_path, self.exp_name, self.pwd, "exp", resume=self.cfg.RESUME + ) def forward(self): # self.validate_V3() - for epoch in range(self.epoch, cfg.MAX_EPOCH): + for epoch in range(self.epoch, self.cfg.MAX_EPOCH): self.epoch = epoch - if epoch > cfg.LR_DECAY_START: + if epoch > self.cfg.LR_DECAY_START: self.scheduler.step() - - # training - self.timer['train time'].tic() + + # training + self.timer["train time"].tic() self.train() - self.timer['train time'].toc(average=False) + self.timer["train time"].toc(average=False) - print( 'train time: {:.2f}s'.format(self.timer['train time'].diff) ) - print( '='*20 ) + print("train time: 
{:.2f}s".format(self.timer["train time"].diff)) + print("=" * 20) # validation - if epoch%cfg.VAL_FREQ==0 or epoch>cfg.VAL_DENSE_START: - self.timer['val time'].tic() - if self.data_mode in ['SHHA', 'SHHB', 'QNRF', 'UCF50']: + if epoch % self.cfg.VAL_FREQ == 0 or epoch > self.cfg.VAL_DENSE_START: + self.timer["val time"].tic() + if self.data_mode in ["SHHA", "SHHB", "QNRF", "UCF50"]: self.validate_V1() - elif self.data_mode is 'WE': + elif self.data_mode == "WE": self.validate_V2() - elif self.data_mode is 'GCC': + elif self.data_mode == "GCC": self.validate_V3() - self.timer['val time'].toc(average=False) - print( 'val time: {:.2f}s'.format(self.timer['val time'].diff) ) + self.timer["val time"].toc(average=False) + print("val time: {:.2f}s".format(self.timer["val time"].diff)) - - def train(self): # training for all datasets + def train(self): # training for all datasets self.net.train() for i, data in enumerate(self.train_loader, 0): - self.timer['iter time'].tic() + self.timer["iter time"].tic() img, gt_map = data img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() self.optimizer.zero_grad() pred_map = self.net(img, gt_map) - loss1,loss2 = self.net.loss - loss = loss1+loss2 + loss1, loss2 = self.net.loss + loss = loss1 + loss2 loss.backward() self.optimizer.step() - if (i + 1) % cfg.PRINT_FREQ == 0: + if (i + 1) % self.cfg.PRINT_FREQ == 0: self.i_tb += 1 - self.writer.add_scalar('train_loss', loss.item(), self.i_tb) - self.writer.add_scalar('train_loss1', loss1.item(), self.i_tb) - self.writer.add_scalar('train_loss2', loss2.item(), self.i_tb) - self.timer['iter time'].toc(average=False) - print( '[ep %d][it %d][loss %.4f][lr %.4f][%.2fs]' % \ - (self.epoch + 1, i + 1, loss.item(), self.optimizer.param_groups[0]['lr']*10000, self.timer['iter time'].diff) ) - print( ' [cnt: gt: %.1f pred: %.2f]' % (gt_map[0].sum().data/self.cfg_data.LOG_PARA, pred_map[0].sum().data/self.cfg_data.LOG_PARA) ) - - - - - - def validate_V1(self):# validate_V1 for SHHA, SHHB, 
UCF-QNRF, UCF50 + self.writer.add_scalar("train_loss", loss.item(), self.i_tb) + self.writer.add_scalar("train_loss1", loss1.item(), self.i_tb) + self.writer.add_scalar("train_loss2", loss2.item(), self.i_tb) + self.timer["iter time"].toc(average=False) + print( + "[ep %d][it %d][loss %.4f][lr %.4f][%.2fs]" + % ( + self.epoch + 1, + i + 1, + loss.item(), + self.optimizer.param_groups[0]["lr"] * 10000, + self.timer["iter time"].diff, + ) + ) + print( + " [cnt: gt: %.1f pred: %.2f]" + % ( + gt_map[0].sum().data / self.cfg_data.LOG_PARA, + pred_map[0].sum().data / self.cfg_data.LOG_PARA, + ) + ) + + def validate_V1(self): # validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 self.net.eval() - + losses = AverageMeter() maes = AverageMeter() mses = AverageMeter() @@ -128,50 +150,65 @@ def validate_V1(self):# validate_V1 for SHHA, SHHB, UCF-QNRF, UCF50 for vi, data in enumerate(self.val_loader, 0): img, gt_map = data - with torch.no_grad(): img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() - pred_map = self.net.forward(img,gt_map) + pred_map = self.net.forward(img, gt_map) pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() - + for i_img in range(pred_map.shape[0]): - pred_cnt = np.sum(pred_map[i_img])/self.cfg_data.LOG_PARA - gt_count = np.sum(gt_map[i_img])/self.cfg_data.LOG_PARA + pred_cnt = np.sum(pred_map[i_img]) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map[i_img]) / self.cfg_data.LOG_PARA - loss1,loss2 = self.net.loss - loss = loss1.item()+loss2.item() + loss1, loss2 = self.net.loss + loss = loss1.item() + loss2.item() losses.update(loss) - maes.update(abs(gt_count-pred_cnt)) - mses.update((gt_count-pred_cnt)*(gt_count-pred_cnt)) - if vi==0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) - + maes.update(abs(gt_count - pred_cnt)) + mses.update((gt_count - pred_cnt) * (gt_count - pred_cnt)) + if vi == 0: + vis_results( + self.exp_name, + self.epoch, + self.writer, + 
self.restore_transform, + img, + pred_map, + gt_map, + ) + mae = maes.avg mse = np.sqrt(mses.avg) loss = losses.avg - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mse', mse, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, mse, loss],self.train_record,self.log_txt) - print_summary(self.exp_name,[mae, mse, loss],self.train_record) - - - def validate_V2(self):# validate_V2 for WE + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mse", mse, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, mse, loss], + self.train_record, + self.log_txt, + ) + print_summary(self.exp_name, [mae, mse, loss], self.train_record) + + def validate_V2(self): # validate_V2 for WE self.net.eval() losses = AverageCategoryMeter(5) maes = AverageCategoryMeter(5) - - for i_sub,i_loader in enumerate(self.val_loader,0): + for i_sub, i_loader in enumerate(self.val_loader, 0): for vi, data in enumerate(i_loader, 0): img, gt_map = data @@ -180,43 +217,66 @@ def validate_V2(self):# validate_V2 for WE img = Variable(img).cuda() gt_map = Variable(gt_map).cuda() - pred_map = self.net.forward(img,gt_map) + pred_map = self.net.forward(img, gt_map) pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() - - + for i_img in range(pred_map.shape[0]): - pred_cnt = np.sum(pred_map[i_img])/self.cfg_data.LOG_PARA - gt_count = np.sum(gt_map[i_img])/self.cfg_data.LOG_PARA - - losses.update(self.net.loss.item(),i_sub) - maes.update(abs(gt_count-pred_cnt),i_sub) - if vi==0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) - + pred_cnt = 
np.sum(pred_map[i_img]) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map[i_img]) / self.cfg_data.LOG_PARA + + losses.update(self.net.loss.item(), i_sub) + maes.update(abs(gt_count - pred_cnt), i_sub) + if vi == 0: + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) + mae = np.average(maes.avg) loss = np.average(losses.avg) - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, 0, loss],self.train_record,self.log_txt) - print_summary(self.exp_name,[mae, 0, loss],self.train_record) - - - def validate_V3(self):# validate_V3 for GCC + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, 0, loss], + self.train_record, + self.log_txt, + ) + print_summary(self.exp_name, [mae, 0, loss], self.train_record) + + def validate_V3(self): # validate_V3 for GCC self.net.eval() - + losses = AverageMeter() maes = AverageMeter() mses = AverageMeter() - c_maes = {'level':AverageCategoryMeter(9), 'time':AverageCategoryMeter(8),'weather':AverageCategoryMeter(7)} - c_mses = {'level':AverageCategoryMeter(9), 'time':AverageCategoryMeter(8),'weather':AverageCategoryMeter(7)} - + c_maes = { + "level": AverageCategoryMeter(9), + "time": AverageCategoryMeter(8), + "weather": AverageCategoryMeter(7), + } + c_mses = { + "level": AverageCategoryMeter(9), + "time": AverageCategoryMeter(8), + "weather": AverageCategoryMeter(7), + } for vi, data in enumerate(self.val_loader, 0): img, gt_map, attributes_pt = data @@ -225,46 +285,68 @@ def validate_V3(self):# validate_V3 for GCC img = Variable(img).cuda() gt_map = 
Variable(gt_map).cuda() - - pred_map = self.net.forward(img,gt_map) + pred_map = self.net.forward(img, gt_map) pred_map = pred_map.data.cpu().numpy() gt_map = gt_map.data.cpu().numpy() - + for i_img in range(pred_map.shape[0]): - pred_cnt = np.sum(pred_map)/self.cfg_data.LOG_PARA - gt_count = np.sum(gt_map)/self.cfg_data.LOG_PARA + pred_cnt = np.sum(pred_map) / self.cfg_data.LOG_PARA + gt_count = np.sum(gt_map) / self.cfg_data.LOG_PARA - s_mae = abs(gt_count-pred_cnt) - s_mse = (gt_count-pred_cnt)*(gt_count-pred_cnt) + s_mae = abs(gt_count - pred_cnt) + s_mse = (gt_count - pred_cnt) * (gt_count - pred_cnt) - loss1,loss2 = self.net.loss - loss = loss1.item()+loss2.item() + loss1, loss2 = self.net.loss + loss = loss1.item() + loss2.item() losses.update(loss) maes.update(s_mae) - mses.update(s_mse) - attributes_pt = attributes_pt.squeeze() - c_maes['level'].update(s_mae,attributes_pt[0]) - c_mses['level'].update(s_mse,attributes_pt[0]) - c_maes['time'].update(s_mae,attributes_pt[1]/3) - c_mses['time'].update(s_mse,attributes_pt[1]/3) - c_maes['weather'].update(s_mae,attributes_pt[2]) - c_mses['weather'].update(s_mse,attributes_pt[2]) - - - if vi==0: - vis_results(self.exp_name, self.epoch, self.writer, self.restore_transform, img, pred_map, gt_map) - + mses.update(s_mse) + attributes_pt = attributes_pt.squeeze() + c_maes["level"].update(s_mae, attributes_pt[0]) + c_mses["level"].update(s_mse, attributes_pt[0]) + c_maes["time"].update(s_mae, attributes_pt[1] / 3) + c_mses["time"].update(s_mse, attributes_pt[1] / 3) + c_maes["weather"].update(s_mae, attributes_pt[2]) + c_mses["weather"].update(s_mse, attributes_pt[2]) + + if vi == 0: + vis_results( + self.exp_name, + self.epoch, + self.writer, + self.restore_transform, + img, + pred_map, + gt_map, + ) + loss = losses.avg mae = maes.avg mse = np.sqrt(mses.avg) - - self.writer.add_scalar('val_loss', loss, self.epoch + 1) - self.writer.add_scalar('mae', mae, self.epoch + 1) - self.writer.add_scalar('mse', mse, self.epoch + 
1) - - self.train_record = update_model(self.net,self.optimizer,self.scheduler,self.epoch,self.i_tb,self.exp_path,self.exp_name, \ - [mae, mse, loss],self.train_record,self.log_txt) - - print_GCC_summary(self.log_txt,self.epoch,[mae, mse, loss],self.train_record,c_maes,c_mses) \ No newline at end of file + self.writer.add_scalar("val_loss", loss, self.epoch + 1) + self.writer.add_scalar("mae", mae, self.epoch + 1) + self.writer.add_scalar("mse", mse, self.epoch + 1) + + self.train_record = update_model( + self.net, + self.optimizer, + self.scheduler, + self.epoch, + self.i_tb, + self.exp_path, + self.exp_name, + [mae, mse, loss], + self.train_record, + self.log_txt, + ) + + print_GCC_summary( + self.log_txt, + self.epoch, + [mae, mse, loss], + self.train_record, + c_maes, + c_mses, + ) diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/utils/__init__.py @@ -0,0 +1 @@ + diff --git a/utils/helper.py b/utils/helper.py new file mode 100644 index 0000000..c8726eb --- /dev/null +++ b/utils/helper.py @@ -0,0 +1,41 @@ +import os + +class Save_Handle(object): + """handle the number of """ + def __init__(self, max_num): + self.save_list = [] + self.max_num = max_num + + def append(self, save_path): + if len(self.save_list) < self.max_num: + self.save_list.append(save_path) + else: + remove_path = self.save_list[0] + del self.save_list[0] + self.save_list.append(save_path) + if os.path.exists(remove_path): + os.remove(remove_path) + + +class AverageMeter(object): + """Computes and stores the average and current value""" + def __init__(self): + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = 1.0 * self.sum / self.count + + def get_avg(self): + return self.avg + + def get_count(self): + return self.count diff --git a/utils/logger.py b/utils/logger.py new file mode 
100644 index 0000000..9a0dc52 --- /dev/null +++ b/utils/logger.py @@ -0,0 +1,16 @@ +import logging + +def setlogger(path): + logger = logging.getLogger() + logger.setLevel(logging.INFO) + logFormatter = logging.Formatter("%(asctime)s %(message)s", + "%m-%d %H:%M:%S") + + fileHandler = logging.FileHandler(path) + fileHandler.setFormatter(logFormatter) + logger.addHandler(fileHandler) + + consoleHandler = logging.StreamHandler() + consoleHandler.setFormatter(logFormatter) + logger.addHandler(consoleHandler) + diff --git a/utils/regression_trainer.py b/utils/regression_trainer.py new file mode 100644 index 0000000..2937bcd --- /dev/null +++ b/utils/regression_trainer.py @@ -0,0 +1,165 @@ +from utils.trainer import Trainer +from utils.helper import Save_Handle, AverageMeter +import os +import sys +import time +import torch +from torch import optim +from torch.utils.data import DataLoader +from torch.utils.data.dataloader import default_collate +import logging +import numpy as np +sys.path.append(os.path.join(os.path.dirname(__file__), "..")) +from models.vgg import vgg19 +from datasets.crowd_sh import Crowd +from losses.bay_loss import Bay_Loss +from losses.post_prob import Post_Prob + + +def train_collate(batch): + transposed_batch = list(zip(*batch)) + images = torch.stack(transposed_batch[0], 0) + points = transposed_batch[1] # the number of points is not fixed, keep it as a list of tensor + targets = transposed_batch[2] + st_sizes = torch.FloatTensor(transposed_batch[3]) + return images, points, targets, st_sizes + + +class RegTrainer(Trainer): + def setup(self): + """initial the datasets, model, loss and optimizer""" + args = self.args + if torch.cuda.is_available(): + self.device = torch.device("cuda") + self.device_count = torch.cuda.device_count() + # for code conciseness, we release the single gpu version + assert self.device_count == 1 + logging.info('using {} gpus'.format(self.device_count)) + else: + raise Exception("gpu is not available") + + 
self.downsample_ratio = args.downsample_ratio + self.datasets = {x: Crowd(os.path.join(args.data_dir, x), + args.crop_size, + args.downsample_ratio, + args.is_gray, x) for x in ['train', 'val']} + self.dataloaders = {x: DataLoader(self.datasets[x], + collate_fn=(train_collate + if x == 'train' else default_collate), + batch_size=(args.batch_size + if x == 'train' else 1), + shuffle=(True if x == 'train' else False), + num_workers=args.num_workers*self.device_count, + pin_memory=(True if x == 'train' else False)) + for x in ['train', 'val']} + self.model =vgg19() + self.model.to(self.device) + self.optimizer = optim.Adam(self.model.parameters(), lr=args.lr, weight_decay=args.weight_decay) + + self.start_epoch = 0 + if args.resume: + suf = args.resume.rsplit('.', 1)[-1] + if suf == 'tar': + checkpoint = torch.load(args.resume, self.device) + self.model.load_state_dict(checkpoint['model_state_dict']) + self.optimizer.load_state_dict(checkpoint['optimizer_state_dict']) + self.start_epoch = checkpoint['epoch'] + 1 + elif suf == 'pth': + self.model.load_state_dict(torch.load(args.resume, self.device)) + + self.post_prob = Post_Prob(args.sigma, + args.crop_size, + args.downsample_ratio, + args.background_ratio, + args.use_background, + self.device) + self.criterion = Bay_Loss(args.use_background, self.device) + self.save_list = Save_Handle(max_num=args.max_model_num) + self.best_mae = np.inf + self.best_mse = np.inf + self.best_count = 0 + + def train(self): + """training process""" + args = self.args + for epoch in range(self.start_epoch, args.max_epoch): + logging.info('-'*5 + 'Epoch {}/{}'.format(epoch, args.max_epoch - 1) + '-'*5) + self.epoch = epoch + self.train_eopch() + if epoch % args.val_epoch == 0 and epoch >= args.val_start: + self.val_epoch() + + def train_eopch(self): + epoch_loss = AverageMeter() + epoch_mae = AverageMeter() + epoch_mse = AverageMeter() + epoch_start = time.time() + self.model.train() # Set model to training mode + + # Iterate over data. 
+ for step, (inputs, points, targets, st_sizes) in enumerate(self.dataloaders['train']): + inputs = inputs.to(self.device) + st_sizes = st_sizes.to(self.device) + gd_count = np.array([len(p) for p in points], dtype=np.float32) + points = [p.to(self.device) for p in points] + targets = [t.to(self.device) for t in targets] + + with torch.set_grad_enabled(True): + outputs = self.model(inputs) + prob_list = self.post_prob(points, st_sizes) + loss = self.criterion(prob_list, targets, outputs) + + self.optimizer.zero_grad() + loss.backward() + self.optimizer.step() + + N = inputs.size(0) + pre_count = torch.sum(outputs.view(N, -1), dim=1).detach().cpu().numpy() + res = pre_count - gd_count + epoch_loss.update(loss.item(), N) + epoch_mse.update(np.mean(res * res), N) + epoch_mae.update(np.mean(abs(res)), N) + + logging.info('Epoch {} Train, Loss: {:.2f}, MSE: {:.2f} MAE: {:.2f}, Cost {:.1f} sec' + .format(self.epoch, epoch_loss.get_avg(), np.sqrt(epoch_mse.get_avg()), epoch_mae.get_avg(), + time.time()-epoch_start)) + model_state_dic = self.model.state_dict() + save_path = os.path.join(self.save_dir, '{}_ckpt.tar'.format(self.epoch)) + torch.save({ + 'epoch': self.epoch, + 'optimizer_state_dict': self.optimizer.state_dict(), + 'model_state_dict': model_state_dic + }, save_path) + self.save_list.append(save_path) # control the number of saved models + + def val_epoch(self): + epoch_start = time.time() + self.model.eval() # Set model to evaluate mode + epoch_res = [] + # Iterate over data. 
+ for inputs, count, name in self.dataloaders['val']: + inputs = inputs.to(self.device) + # inputs are images with different sizes + assert inputs.size(0) == 1, 'the batch size should equal to 1 in validation mode' + with torch.set_grad_enabled(False): + outputs = self.model(inputs) + res = count[0].item() - torch.sum(outputs).item() + epoch_res.append(res) + + epoch_res = np.array(epoch_res) + mse = np.sqrt(np.mean(np.square(epoch_res))) + mae = np.mean(np.abs(epoch_res)) + logging.info('Epoch {} Val, MSE: {:.2f} MAE: {:.2f}, Cost {:.1f} sec' + .format(self.epoch, mse, mae, time.time()-epoch_start)) + + model_state_dic = self.model.state_dict() + if (2.0 * mse + mae) < (2.0 * self.best_mse + self.best_mae): + self.best_mse = mse + self.best_mae = mae + logging.info("save best mse {:.2f} mae {:.2f} model epoch {}".format(self.best_mse, + self.best_mae, + self.epoch)) + torch.save(model_state_dic, os.path.join(self.save_dir, 'best_model.pth')) + + + diff --git a/utils/trainer.py b/utils/trainer.py new file mode 100644 index 0000000..dbd6ab1 --- /dev/null +++ b/utils/trainer.py @@ -0,0 +1,24 @@ +import os +import logging +from datetime import datetime +from utils.logger import setlogger + + +class Trainer(object): + def __init__(self, args): + sub_dir = datetime.strftime(datetime.now(), '%m%d-%H%M%S') # prepare saving path + self.save_dir = os.path.join(args.save_dir, sub_dir) + if not os.path.exists(self.save_dir): + os.makedirs(self.save_dir) + setlogger(os.path.join(self.save_dir, 'train.log')) # set logger + for k, v in args.__dict__.items(): # save args + logging.info("{}: {}".format(k, v)) + self.args = args + + def setup(self): + """initial the datasets, model, loss and optimizer""" + pass + + def train(self): + """training one epoch""" + pass